web - feliyur/exercises GitHub Wiki

SSL Certificate

One free option is Let's Encrypt.
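Certificates are typically obtained with Certbot. A minimal sketch, assuming an nginx setup (example.com is a placeholder; other modes such as --apache or certonly --standalone also exist):

sudo certbot --nginx -d example.com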

Renewing

sudo certbot renew

See the Certbot documentation for details.
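To check that renewal will work without touching the live certificates, Certbot supports a dry run:

sudo certbot renew --dry-run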

Self-Hosted WebDAV via Docker

https://hub.docker.com/r/bytemark/webdav/
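A sketch of running the image, assuming the USERNAME, PASSWORD and AUTH_TYPE environment variables and the /var/lib/dav data path described on that Docker Hub page:

docker run --name webdav --restart always \
    -e AUTH_TYPE=Basic -e USERNAME=alice -e PASSWORD=secret1234 \
    -v /srv/dav:/var/lib/dav \
    -p 8080:80 -d bytemark/webdav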

Mediawiki

Running a docker instance

Docs: https://hub.docker.com/_/mediawiki/

Getting an instance up

$ docker run --name some-mediawiki -p 8080:80 -d mediawiki

Open http://localhost:8080 and proceed with the setup wizard. Choose SQLite as the database if you are not running a database server separately. At the end of the process, download the generated LocalSettings.php file and docker cp it into the container's working directory (where index.php resides).
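With the official image the web root is /var/www/html, so the copy looks roughly like this (container name as in the run command above):

docker cp LocalSettings.php some-mediawiki:/var/www/html/LocalSettings.php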

You might need to add the following at the end of LocalSettings.php (it registers file:// as an allowed link protocol):

$wgUrlProtocols[] = "file://";

To edit the file inside the container, first chown it to root:root to make it editable (and run apt-get update followed by apt-get install vim to get an editor).
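A sketch of those steps, using the container name and path from above:

docker exec -it some-mediawiki bash
# inside the container:
apt-get update && apt-get install -y vim
chown root:root /var/www/html/LocalSettings.php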

Authentication, retrieving and adding content

Get a login token; the token is returned in the query.tokens.logintoken field.

curl -X POST -c cookies.txt -d "action=query&meta=tokens&type=login&format=json&lgname=Username&lgpassword=UserPassword"   https://<mediawiki address>/api.php
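The token, together with the cookies saved in cookies.txt, can then be used to log in. A sketch using --data-urlencode, since login tokens contain characters such as + and \ that must be URL-encoded (for a main account, clientlogin or a bot password may be required, as in the examples further down):

curl -X POST -b cookies.txt -c cookies.txt \
    --data-urlencode "action=login" \
    --data-urlencode "lgname=Username" \
    --data-urlencode "lgpassword=UserPassword" \
    --data-urlencode "lgtoken=<login token from the previous call>" \
    --data-urlencode "format=json" \
    "https://<mediawiki address>/api.php"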
| Action | Command |
| --- | --- |
| Get raw page content (wikitext) | `curl -X GET "https://<my wiki url>/index.php?title=<page title>&action=raw"` |
import os
import requests
import http.cookiejar

# MediaWiki API URL
API_URL = "http://localhost:8080/api.php"

# Path to the cookies.txt file
COOKIES_FILE = 'cookies.txt'

# Retrieve login credentials from environment variables or hardcode them
USERNAME = os.getenv('MEDIAWIKI_USERNAME', 'your_username')
PASSWORD = os.getenv('MEDIAWIKI_PASSWORD', 'your_password')

# Initialize a session with cookie handling
session = requests.Session()

# Use a Mozilla-format cookie jar backed by cookies.txt; load it if it already exists
session.cookies = http.cookiejar.MozillaCookieJar(COOKIES_FILE)
if os.path.exists(COOKIES_FILE):
    session.cookies.load(ignore_discard=True, ignore_expires=True)

# Step 1: Get login token
response = session.get(API_URL, params={
    'action': 'query',
    'meta': 'tokens',
    'type': 'login',
    'format': 'json'
})

# Check if the request was successful
if response.status_code == 200:
    result = response.json()
    login_token = result['query']['tokens']['logintoken']
    print(f"Login Token: {login_token}")
else:
    print(f"Failed to get login token. Error: {response.status_code}")
    exit()

# Step 2: Log in using the login token
login_data = {
    'action': 'clientlogin',
    'format': 'json',
    'username': USERNAME,
    'password': PASSWORD,
    'logintoken': login_token,
    'loginreturnurl': 'http://localhost:8080/'
}

response = session.post(API_URL, data=login_data)

# Check if login was successful
if response.status_code == 200:
    login_result = response.json()
    if 'clientlogin' in login_result and login_result['clientlogin']['status'] == 'OK':
        print("Login successful!")
    else:
        print(f"Login failed. Response: {login_result}")
        exit()
else:
    print(f"Failed to log in. Error: {response.status_code}")
    exit()

# Step 3: Save cookies to cookies.txt after login (ignore_discard so that
# session cookies are written as well)
session.cookies.save(COOKIES_FILE, ignore_discard=True, ignore_expires=True)

# Step 4: Get CSRF token
response = session.get(API_URL, params={
    'action': 'query',
    'meta': 'tokens',
    'type': 'csrf',
    'format': 'json'
})

# Check if the request for CSRF token was successful
if response.status_code == 200:
    result = response.json()
    csrf_token = result['query']['tokens']['csrftoken']
    print(f"CSRF Token: {csrf_token}")
else:
    print(f"Failed to get CSRF token. Error: {response.status_code}")
    exit()

# Step 5: Create a page using the CSRF token
PAGE_TITLE = "Test Page"
PAGE_CONTENT = "This is the content of the test page."

# Data for creating the page. The CSRF token is passed as-is: requests
# URL-encodes form fields itself, so pre-encoding it would double-encode it.
page_data = {
    'action': 'edit',
    'title': PAGE_TITLE,
    'text': PAGE_CONTENT,
    'token': csrf_token,
    'format': 'json'
}

response = session.post(API_URL, data=page_data)

# Check if the page was created successfully
if response.status_code == 200:
    result = response.json()
    if 'edit' in result and result['edit']['result'] == 'Success':
        print(f"Page '{PAGE_TITLE}' created successfully!")
    else:
        print(f"Failed to create the page. Response: {result}")
else:
    print(f"Error: {response.status_code}, {response.text}")

Example from the MediaWiki documentation:

https://www.mediawiki.org/wiki/API:Edit

import requests

S = requests.Session()

URL = "https://test.wikipedia.org/w/api.php"

# Step 1: GET request to fetch login token
PARAMS_0 = {
    "action": "query",
    "meta": "tokens",
    "type": "login",
    "format": "json"
}

R = S.get(url=URL, params=PARAMS_0)
DATA = R.json()

LOGIN_TOKEN = DATA['query']['tokens']['logintoken']

# Step 2: POST request to log in. Use of main account for login is not
# supported. Obtain credentials via Special:BotPasswords
# (https://www.mediawiki.org/wiki/Special:BotPasswords) for lgname & lgpassword
PARAMS_1 = {
    "action": "login",
    "lgname": "bot_user_name",
    "lgpassword": "bot_password",
    "lgtoken": LOGIN_TOKEN,
    "format": "json"
}

R = S.post(URL, data=PARAMS_1)

# Step 3: GET request to fetch CSRF token
PARAMS_2 = {
    "action": "query",
    "meta": "tokens",
    "format": "json"
}

R = S.get(url=URL, params=PARAMS_2)
DATA = R.json()

CSRF_TOKEN = DATA['query']['tokens']['csrftoken']

# Step 4: POST request to edit a page
PARAMS_3 = {
    "action": "edit",
    "title": "Project:Sandbox",
    "token": CSRF_TOKEN,
    "format": "json",
    "appendtext": "Hello"
}

R = S.post(URL, data=PARAMS_3)
DATA = R.json()

print(DATA)