Transclusion guessing from TOC
n = ; // offset added to the printed page numbers to get scan positions
s = ``.replaceAll("\t", "\n").split("\n"); // paste the TOC between the backticks: alternating titles and page numbers, tab- or newline-separated
e = []; // titles
p = []; // start pages
for (let i = 0; i < s.length; i++) {
    if (i % 2) {
        p.push(Number(s[i]) + n); // odd entries are page numbers
    } else {
        e.push(s[i].trim()); // even entries are titles
    }
}
for (let i = 0; i < p.length; i++) {
    e[i] += "/" + p[i] + "-";
    if (i < p.length - 1) {
        if (p[i+1] == p[i]) {
            e[i] += p[i] + " a"; // next poem starts on the same page
        } else {
            e[i] += p[i+1] - 1; // ends on the page before the next poem starts
        }
    }
    if (i && e[i-1].slice(-1) == "a")
        e[i] += " b"; // previous poem shares this poem's first page
    e[i] = e[i].replace(/\/(\d+)-\1($| )/, "/$1$2"); // collapse one-page ranges
}
console.log(e.join("\n"))
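A minimal worked example (hypothetical titles and offset):
// With n = 10 and a TOC pasted between the backticks as two tab-separated pairs,
//   The First Poem   3
//   The Second Poem  5
// the script prints:
//   The First Poem/13-14
//   The Second Poem/15-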
Works update
// do the first line, then preview, then do the rest (the preview regenerates the progress bars, whose fresh counts are read back below)
$("#wpTextbox1").val($("#wpTextbox1").val().replaceAll(/(?<=\|)\d+(?=\||\}\})/g, "")); // empty all
l = $(".wst-progress-bar"); // progress bars in the rendered preview
l = l.map((x) => l[x].dataset); // jQuery map: x is the index, so l[x] is the DOM element
n = [];
for (let i = 0; i < l.length; i++) {
    y = l[i];
    f = x => x == "0" ? "" : x; // blank out zero counts
    n.push([f(y.total), f(y.q4), f(y.q3), f(y.q2), f(y.q1), f(y.q0)]);
}
for (let x of n) {
    $("#wpTextbox1").val(
        $("#wpTextbox1").val()
            .replace(/(\{\{User:Alien333\/Poemrow\|.*?)\|\|\|\|\|\}\}/, "$1" + x.join("|") + "}}") // inject data
    );
}
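For reference, a hypothetical before/after of a single row, assuming each Poemrow call carries the six counts (total, q4, q3, q2, q1, q0) as positional parameters, which is what the regexes above imply:
// before (after the "empty all" step): {{User:Alien333/Poemrow|Some Work||||||}}
// after injection:                     {{User:Alien333/Poemrow|Some Work|12||1|3|8|}}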
Mass actions
massaction = (a, l, r, t, s={}) => { // a: API action, l: list of titles, r: reason, t: csrf token, s: extra API parameters
    let api = new mw.Api();
    let f = (i) => {
        if (i < l.length) {
            let base = {
                action: a,
                title: l[i],
                reason: r,
                token: t,
            };
            let par = { ...base, ...s };
            api.post(par).done((a) => console.log(a));
            f(i+1);
        }
    };
    f(0);
}
// or, as a one-off without the helper (assumes api, r, and token are already defined):
l.forEach((s) => api.post({action: "delete", reason: r, title: s, token: token}).done(console.log).fail(console.log));
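A usage sketch with hypothetical titles and reasons; the s object is merged straight into the API request, so whatever extra parameters the chosen action needs can be passed there:
massaction("delete", ["Page:Example.djvu/1", "Page:Example.djvu/2"], "some reason", mw.user.tokens.get("csrfToken"));
massaction("protect", ["Example page"], "some reason", mw.user.tokens.get("csrfToken"), {protections: "edit=sysop|move=sysop"});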
Deleting Index: & all Page:s
index = ""; // index name, without ns prefix
n = ; // largest page number
l = Array.from(new Array(n), (x, i) => "Page:"+index+"/"+(i+1));
massaction("delete", l, "[[WS:PD#Index:"+index+"|WS:PD discussion]]", mw.user.tokens.get("csrfToken"));
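Note that l only covers the Page: pages; if the Index: page itself should also go, it could be appended to the list before the massaction call (a hypothetical extra line):
l.push("Index:"+index);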
All poems in a Poems
<templatestyles src="User:Alien333/NoHeaderOrSimilar.css"/>
{{#invoke:String|replace|{{#invoke:Parse|main|{{#invoke:Transclude list|main|Poems {{#invoke:String|replace|{{#invoke:String|replace|{{#invoke:String|sub|{{User:Alien333/Expand list|{{{1}}}}}|2|-2}}|
||all|false}}|{{!}}(|;Poems (}}}}}}|{{!}}|{{ppb}}}}
Getting Hathi scan images
(The delays are still to be tuned: 2000 ms definitely works, but smaller values might also be enough.)
// run in the browser console on the HathiTrust reader ($ is the console alias for document.querySelector)
next = $(`[aria-label="Next Page"]`);
down = $("p.mb-3 button"); // download button
page = $("#toolbar-seq"); // page-number input
page.value = "1";
page.dispatchEvent(new Event('blur')); // jump back to the first page
n = Number(page.parentElement.children[3].innerHTML); // total number of pages
f = (i) => {
    if (i == n+1) return;
    down.click();
    setTimeout(() => { next.click(); setTimeout(() => f(i+1), 1000); }, 1000);
};
setTimeout(() => f(1), 1000);
Or, another way, which is probably better:
n = Number(page.parentElement.children[3].innerHTML);
f = (i) => {
    if (i == n+1) return;
    p = $("#p"+i); // page parent item
    console.log(i);
    p.scrollIntoView();
    setTimeout(() => { // give the image time to load
        x = $("#p"+i+" figure div img"); // img element
        canvas = document.createElement("canvas"); // draw it on a canvas
        document.body.appendChild(canvas);
        context = canvas.getContext("2d");
        canvas.height = x.clientHeight;
        canvas.width = x.clientWidth;
        context.drawImage(x, 0, 0);
        setTimeout(() => { // (giving it time)
            url = (x.src.startsWith("blob:")) ? canvas.toDataURL("image/jpeg") : (x.src); // to be able to get a file from the blob: url
            a = document.createElement("a"); // need to create a link to fake a click on it
            a.href = url;
            a.download = i;
            a.target = "_blank";
            document.body.appendChild(a);
            a.click();
        }, 2000);
        f(i+1);
    }, 2000);
};
f(1);
333Bot
import pywikibot as pwb
from pywikibot.pagegenerators import SearchPageGenerator
import re

enws = pwb.Site("en", "wikisource")
wd = enws.data_repository()
gen = SearchPageGenerator("insource:/wikidata *= *Q[0-9]+/", site=enws, namespaces=[102, 100, 0])
for page in gen:
    title = page.title()
    content = page.text
    try:
        page.data_item()  # means that it's correctly linked and we don't care
    except:
        qid = re.match(r"(.|\n)*wikidata *= *(Q[0-9]+)", content, re.M).groups()[1]
        item = pwb.ItemPage(wd, qid)
        try:
            item.getSitelink("enwikisource")  # the item already has another enws sitelink
        except:
            try:
                item.setSitelink(sitelink={"site": "enwikisource", "title": title}, summary="added missing sitelink to English Wikisource (bot)")
                print("Added sitelink " + title + " to the item " + qid)
            except:
                print("Error: Addition of sitelink " + title + " to the item " + qid + " was skipped")
print("All pages done.")
Transclusion arguments guessing
n = ; // largest page number
index = "Poems djvu"; // index name, without the "Index:" prefix
s = new Array(n);
api = new mw.Api();
// the requests below are asynchronous: wait for them to finish before running the rest
for (let i = 1; i <= n; i++) {
    api.get({action: "query", prop: "revisions", rvprop: "content", rvslots: "main", titles: "Page:"+index+"/"+i, format: "json"}).done(q => {
        for (let o in q.query.pages) { s[i] = q.query.pages[o]; }
    });
}
e = []; // titles
p = []; // start pages
istitle = /\{\{tpp\|/;
capitalize = s => s[0].toUpperCase() + s.slice(1);
rmpunct = s => s.replace(/[\.,;!\?]$/, "");
// target and rep are user-defined: an optional replacement applied to every title (drop the .replaceAll if not needed)
s.forEach(x => {
    content = x.revisions[0].slots.main["*"];
    if (istitle.test(content)) {
        title = /\{\{tpp\|([^\|\{\n]*)/s.exec(content);
        title = capitalize(rmpunct(title[1].split("\n")[0]).toLowerCase()).replaceAll(target, rep);
        number = Number(/\d+$/.exec(x.title)[0]);
        e.push(title);
        p.push(number);
    }
})
se = new Set(e);
dups = []; // indices of duplicated titles, to check manually
for (let i = 0; i < e.length; i++) {
    if (se.has(e[i])) {
        se.delete(e[i]);
    } else {
        dups.push(i);
    }
}
console.log(e.map(s => "* [[/"+s+"/]]").join("\n")) // list of subpage links
for (let i = 0; i < p.length; i++) {
    e[i] += "/" + p[i] + "-";
    if (i) {
        e[i-1] += p[i] - 1; // each poem ends on the page before the next one starts
    }
}
console.log(e.join("\n").replaceAll(/\/(\d+)-\1/g, "/$1")) // collapse one-page ranges
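A hedged sketch of the output, with hypothetical titles and pages:
// first console.log (subpage list):
//   * [[/The first poem/]]
//   * [[/The second poem/]]
// second console.log (transclusion ranges; the last poem's range is left open):
//   The first poem/5-7
//   The second poem/8-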