User:Theleekycauldron/DYKViews.js

From Wikipedia, the free encyclopedia
Note: After saving, you have to bypass your browser's cache to see the changes. Google Chrome, Firefox, Microsoft Edge and Safari: Hold down the ⇧ Shift key and click the Reload toolbar button. For details and instructions about other browsers, see Wikipedia:Bypass your cache.
// Module-level state shared across the helper functions below.
let output = []; //final table result: [wikitext, viewsPerHour] pairs
let papp = []; //explanation of the app/papp system is below (see getData)
let pappis = [];
let summary = []; //per-hook {article, vph, imaged} records consumed by summaryTable()
let rapagename; //page name currently being processed
let cowboy = String.fromCodePoint(129312); // 🤠 — logged next to hooks that clear the stats threshold
importScript('User:Theleekycauldron/DYK credit finder.js');
importScript('User:Theleekycauldron/DYK stats notifier.js');
let threshold = [600,1000]; //non-imaged threshold, imaged threshold (views per hour)
// Shared MediaWiki API client. The original wrapped `let api = new mw.Api()`
// in a try block, which scoped `api` to the try body and left the rest of the
// script without a usable binding; create it on the global object instead,
// without clobbering an instance an imported script may already have created.
try {
	if (typeof api === "undefined") {
		window.api = new mw.Api();
	}
} catch (e) {
	// mw may be unavailable (e.g. script evaluated outside MediaWiki); later
	// API calls will then fail loudly rather than silently.
}
let months = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]; //this one seems self-explanatory
let dict = { //maps month names to zero-padded, zero-based month index strings
	"December":  "11",
	"November":  "10",
	"October":   "09",
	"September": "08",
	"August":    "07",
	"July":      "06",
	"June":      "05",
	"May":       "04",
	"April":     "03",
	"March":     "02",
	"February":  "01",
	"January":   "00"
}
function httpGet(theUrl) { //perform a blocking HTTP GET and return the raw response body
    // NOTE: synchronous XHR blocks the UI thread; kept for simplicity when
    // querying the Wikimedia pageviews API.
    var request = new XMLHttpRequest();
    request.open( "GET", theUrl, false ); // third argument false => synchronous
    request.send( null );
    return request.responseText;
}
function capitalizeFirstLetter(string) { //upper-case only the first character, leaving the rest untouched
  const head = string.slice(0, 1);
  const tail = string.slice(1);
  return head.toUpperCase() + tail;
}

function doSomethingInCaseOfError (err) {
    // Shared failure handler for API requests. The original logged only the
    // literal string 'err' and discarded the failure details; surface them so
    // problems are diagnosable. `err` may be undefined when jQuery's .fail()
    // invokes this with no arguments.
    console.log( 'err', err );
}

function goBack(obj){
	// Format a Date as the "yyyymmdd00" string used in pageview API URLs
	// (local calendar fields; hour fixed at "00").
	// Fix: `month` was an implicit global; it is now properly scoped.
	let month = obj.getMonth()+1;
	return obj.getFullYear().toString()+month.toString().padStart(2,"0")+obj.getDate().toString().padStart(2,"0")+"00";
}

Date.prototype.addDays = function(days) {
	// Return a new Date `days` days after this one (negative values go back).
	// Does not mutate the receiver.
	const result = new Date(this.valueOf());
	result.setDate(result.getDate() + days);
	return result;
}

function randint(max) {
  // Uniform pseudo-random integer in [0, max).
  const roll = Math.random() * max;
  return Math.floor(roll);
}

async function getDYKViews(rpn,jitter,edit) {
	// Main entry point. Fetches the requested WP:Recent additions page (plus
	// the spillover set from the following month's page, when applicable),
	// computes pageview statistics for every bolded DYK article, and either
	// logs the finished {{DYK stats table}} (edit=false) or writes it on-wiki
	// and notifies qualifying users (edit=true).
	//   rpn    - "Wikipedia:Recent_additions" or "Wikipedia:Recent_additions/<year>/<Month>"
	//   jitter - when true, adds cache-busting to pageview queries (see getData)
	//   edit   - when true, saves results on-wiki instead of logging them
    output = []
    papp = [];
	pappis = [];
	summary = [];
	rapagename=rpn;
    console.log(rapagename);
	let t1 = []; // wikitext of the spillover set fetched from the following month's page
	let t2 = []	// wikitext of the requested page itself
	// Fetch the latest revision of the requested page.
	let v2 = api.get( {
		prop: 'revisions',
		rvprop: 'content',
		rvlimit: 1,
		indexpageids: true,
		titles: rapagename
	} )
	.then( function ( data ) {
		var q = data.query,
			id = q && q.pageids && q.pageids[0],
			pg = id && q.pages && q.pages[ id ],
			rv = pg && pg.revisions;
		if ( rv && rv[0] && rv[0]['*'] ) {
			t2 = rv[0]['*'];
		}
	} ).fail( doSomethingInCaseOfError );
	
	if (rapagename != "Wikipedia:Recent_additions"){
		// Archive page: the month's bottom set runs past the month boundary, so
		// its removal timestamp lives at the top of the FOLLOWING month's page.
		let s = rapagename.split("/");		
		let d = new Date(parseInt(s[1]),parseInt(dict[s[2]])+1,1); // first day of the following month (dict is zero-based)
		let d1 = Date.UTC(d.getUTCFullYear(),d.getUTCMonth(),1);
		var cd = new Date(); 
		var cd1 = Date.UTC(cd.getUTCFullYear(),cd.getUTCMonth(),1);
		let str = "";
		if (cd1 == d1){
			// The following month is the current month, so its sets are still on the live page.
			str = "Wikipedia:Recent_additions";
		} else {
			str = "Wikipedia:Recent_additions/"+d.getFullYear()+"/"+months[d.getMonth()];
		}
		// NOTE(review): `v1` is an implicit global.
		v1 = api.get( {
			prop: 'revisions',
			rvprop: 'content',
			rvlimit: 1,
			indexpageids: true,
			titles: str
		} )
		.then( function ( data ) {
			var q = data.query,
				id = q && q.pageids && q.pageids[0],
				pg = id && q.pages && q.pages[ id ],
				rv = pg && pg.revisions;
			if ( rv && rv[0] && rv[0]['*'] ) {
				// Keep only the last set on the page (from the final bolded
				// timestamp onward) and give it a dated section heading.
				// NOTE(review): `te` is an implicit global.
				te = rv[0]['*'];
				te = te.substring(te.lastIndexOf("*\'\'\'\'\'"));
				te = "===1 "+months[d.getMonth()]+" "+d.getFullYear()+"===\n"+te;
				t1 = te;
			}
		} ).fail( doSomethingInCaseOfError );
		await Promise.all([v1,v2]);
	} else {
		await v2;
	}
	// Concatenate spillover + page text, then parse. NOTE(review): `tt` is an
	// implicit global; `t1`/`t2` start as arrays but are string-coerced here.
	tt = t1 + "\n" + t2;
	getData(tt,jitter);
	// Sort table rows by views-per-hour, descending.
	output.sort(function(a, b) {
	  return b[1] - a[1];
	});
	let pr = "{{DYK stats table|";
	output.forEach((item) => pr += "\n"+item[0]);
	pr += "\n}}";
	pr = pr.replaceAll(/File:/g,""); // strip any "File:" prefixes that survived parsing
	if (edit){
		writeToStats(pr);
		var i=0;
		let monthyear = getmonthyear();
		// Notify the contributors of qualifying single-hook rows.
		for (row of output){ // NOTE(review): `row` and `user` below are implicit globals
			var info = row[0].split("|");
			if (info[0] == "{{DYK stats table row"){
				user = await find_contrib_data([info[1]]);
				// NOTE(review): `info[5]>=[+info[2]!==""]` compares the vph field
				// against a one-element array via coercion — possibly meant to be
				// a threshold[...] lookup keyed on whether the row has an image;
				// confirm intent before changing.
				if (user[info[1]].length>0 && (rapagename=="Wikipedia:Recent_additions") && info[5]>=[+info[2]!==""]){
					notify(user[info[1]][0].replace("User talk:",""),info[1],info[4],info[5],monthyear);
				}
			}
		}
	} else {
		console.log(pr);
	}	
}
function getData(wikitext,jitter) { //parses a WP:Recent additions page into files, bolded articles, pageviews, and timestamps
	// Walks the wikitext line by line, building an "app" (timestamp + image +
	// per-article pageview records) for each hook set, and hands every pair of
	// consecutive sets to processData() so each set's time on the main page
	// can be computed by subtraction.
	//   wikitext - concatenated Recent-additions wikitext (see getDYKViews)
	//   jitter   - when true, add cache-busting to pageview queries
    const splitLines = str => str.split(/\r?\n/); //function to split lines of string
    const wikiarr = splitLines(wikitext); // fix: was an implicit global; only used here
    let date; //[endDate, startDate] pageview-query bounds (yyyymmdd00)
    let r = []; //retained from the original; appears unused here
    let multis = []; //an index of all the multi-hooks in a given set, since the app below doesn't care where it finds bolded articles in a set
    let app = []; //an "app" is the standard way of codifying a set of hooks; it contains the image filename, the timestamp, and several arrays containing data on hook performance by bolded article
    let pmultis = []; 
    papp = []; //pmultis and papp ("previous multis"/"previous app") lag one set behind; we actually process papps: the papp came after, so subtracting its timestamp from app's tells us how long papp was on the main page
	let d;
	let setNum=0; //ordinal of the set, counted from the top of the page
    for (var i=0; i<wikiarr.length; i++){ //iterate through every line of wikitext
		//classify each line: timestamp, image, or hook; anything else is skipped
        if (wikiarr[i].includes(" (UTC)'''")) { //a bolded "(UTC)" line is the timestamp at which a set left the main page
			setNum++;
			let j=i+1;
			while (!wikiarr[j].includes(" (UTC)'''")){ //find the next timestamp down: it marks when this set went ON the main page
				j++;
				if (j == wikiarr.length){ //the bottom set of a DYKA page belongs to the previous month; reuse this timestamp
					j=i;
					break;
				}
			}
			d = convertToDate(wikiarr[j].substring(6,wikiarr[j].length-11)); //strip the bold markup around the timestamp
			date = [goBack(d.addDays(1)),goBack(d.addDays(-2))]; //pageview window: two days before through one day after
            if (app.length > 0){ //a new set begins: process the previous pair first
                if (papp.length > 0){ //only once we have both a papp and an app
                    processData(papp,app,pmultis);
                }
                //current set becomes the "previous" set (deep-copied), freeing app/multis
                pmultis = JSON.parse(JSON.stringify(multis));
                papp = JSON.parse(JSON.stringify(app));
                app = [];
                multis = [];
            }
            app.push(wikiarr[i].substring(6,wikiarr[i].length-11)); //save the timestamp to the current app
        } else if (wikiarr[i].includes("{{main page image")){ //the set's image file, saved for the stats table
            let sub = wikiarr[i].split("|")[1]; //the second pipe-separated field holds the filename
			if (sub.includes("File:")){ //strip a leading "File:" prefix when present
                sub = sub.substring(5);
            }
            app.push(sub.substring(sub.indexOf("=")+1)); //push the filename into the app
        } else if (wikiarr[i].substring(0,5) == "* ..."){ //a hook line
            //two patterns for bolded articles: '''[[Article]]''' and [[Article|'''display''']]
            let res = [/[^\]]'''(?:| |{{lang\|..\|)\[\[([^\]\|]+)/g,/\[\[[^\|]+\|'''([^']+)''']]/g];
            let matches = []; //every bolded article found in this hook
			let match;
			for (const re of res){ // fix: `re` was an implicit global
				while ((match = re.exec(wikiarr[i])) !== null){
					matches.push(match[1]);
				}
			}
			if (matches.length == 0){
				console.log(wikiarr[i]) //surface hooks we failed to parse
			}
			let url, result, parsed;
			try {
				for (var j=0; j<matches.length; j++) { //query pageviews for each bolded article
					matches[j] = capitalizeFirstLetter(matches[j]); //handle '''[[article]]''' casing
					//Wikimedia per-article pageviews URL: user traffic only, underscored/sanitized
					//title, yyyymmdd00 date range, plus a random cache-busting parameter for the
					//most recent page's first few sets so the API cannot serve a stale answer
					url = `https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/user/${matches[j].replaceAll(/ /g,"_").replaceAll(/&nbsp;/g,"_").replaceAll("/","%2F").replaceAll("?","%3F")}/daily/${date[1]}/${date[0]}${((((rapagename=="Wikipedia:Recent_additions")||jitter)&&(setNum<3))?("?max-age="+randint(1000)):"")}`;
					result = httpGet(url); //push the URL to a GET request
					result = JSON.parse(result);
					parsed = [matches[j]] //"parsed" becomes [article, baseline1, baseline2, dayViews, hook]
					let dstr = goBack(d);
					let views = result.items.map(item => item.views);
					// Bug fix: the original ALSO forEach-pushed every item.views onto
					// `views` after the map above, doubling the array and corrupting
					// the slice below whenever the DYK date fell near the end of the
					// returned range.
					let k = result.items.map(item => item.timestamp).indexOf(dstr); //index of the DYK date
					if (k<2){ //pad leading zeros so the DYK date sits at index two
						for (var l=0; l<2-k; l++){views.splice(0,0,0)}
					} else { //otherwise drop everything more than two days before it
						views = views.slice(k-2);
					}
					if (views.length > 4){ //normalize to exactly four days of data
						views = views.slice(0,4);
					}
					while (views.length < 4){
						views.push(0);
					}
					//choose whichever baseline pair (days before vs. surrounding days)
					//yields the larger spike estimate for the DYK day (index 2)
					if (views[2]-(views[0]+views[1])/2 > views[2]-(views[1]+views[3])/2){
						parsed = parsed.concat(views.slice(0,3))
					} else {
						parsed = parsed.concat([views[1],views[3],views[2]]);
					}
					parsed.push(wikiarr[i].substring(2));//append the hook text
					app.push(parsed); //push the parsed record into the app
				}
			} catch(err) { //dump debugging context on failure
				console.log(err);
				console.log(wikiarr[i]);
				console.log(url);
				console.log(result);
				console.log(matches)
				console.log("error");
				
			}
            if (matches.length > 1){ //remember multi-hooks for processData
                multis.push(matches);
            }
        }
    }
    if (app.length > 0){ //process the last papp
        if (papp.length > 0){
            processData(papp,app,pmultis);
        }
        papp = JSON.parse(JSON.stringify(app));
        app = [];
        multis = [];
    }
}

function cleanNumber(n){
	// Round to one decimal place and format with thousands separators.
	const rounded = (Math.round(n * 10) / 10).toFixed(1);
	return numberWithCommas(rounded)
}

function summaryTable(){
	// Builds the month's summary wikitext from the module-level `summary`
	// array ({article, vph, imaged} records pushed by processData): a
	// low/median/high table plus three transcludable rows (overall,
	// imaged-only, non-imaged-only), keyed on views-per-hour (vph).
	summary.sort(function(a, b){return a.vph - b.vph}); //ascending by vph; median() below relies on this
	// NOTE(review): stdev(), mean(), and index() appear unused below.
	function stdev(array){
		const n = array.length;
		const mean = array.reduce((a, b) => a + b) / n;
		return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n);
	}
	function mean(array){
		return array.reduce((a, b) => a + b) / array.length;
	}
	function median(array){
		//already sorted
		//returns [medianValue, lowerIndex, upperIndex]; the indices differ only
		//for even-length arrays
		let id1 = Math.floor((array.length-1)/2)
		let id2 = Math.floor(array.length/2)
		return [(array[id1]+array[id2])/2,id1,id2];
	}
	function index(n){
		return summary.findIndex(object => {
			return object.vph === n;
		});
	}
	//partition into imaged vs. non-imaged hooks and extract their vph values
	let imagedarr = summary.filter(a => a.imaged);
	let nonimagedarr = summary.filter(a => !a.imaged)
	let imagedvph = imagedarr.map(a => a.vph);
	let nonimagedvph = nonimagedarr.map(a => a.vph);
	let total = summary.map(a => a.vph);
		
	let tables = "==To main summary page==\n{{DYK stats monthly summary table|"	
	//row 1: lows — column order throughout is [non-imaged, imaged, all]
	let lows = [Math.min(...nonimagedvph),Math.min(...imagedvph),Math.min(...total)];
	let lowlabels = [];
	for (var i=0; i<3; i++){
		var arr = [nonimagedarr,imagedarr,summary][i];
		var indexarr = [nonimagedvph,imagedvph,total][i];
		lowlabels.push(arr[indexarr.indexOf(lows[i])]["article"]);
	}
	lows = lows.map(a => cleanNumber(a));
	tables += "\n{{DYK stats monthly summary table row|Low|"+lows[0]+"|"+lowlabels[0]+"|"+lows[1]+"|"+lowlabels[1]+"|"+lows[2]+"|"+lowlabels[2]+"}}";
	
	//row 2: medians — for even-length arrays, both middle articles are listed
	let medians = [median(nonimagedvph),median(imagedvph),median(total)];
	let medianlabels = [];
	for (var i=0; i<3; i++){
		var arr = [nonimagedarr,imagedarr,summary][i];
		let addendum = (medians[i][1]!=medians[i][2])?("<br/>"+arr[medians[i][2]]["article"]):("");
		medianlabels.push((arr[medians[i][1]]["article"])+addendum);
	} 
	medians = medians.map(a => cleanNumber(a[0]));
	tables += "\n{{DYK stats monthly summary table row|Median|"+medians[0]+"|"+medianlabels[0]+"|"+medians[1]+"|"+medianlabels[1]+"|"+medians[2]+"|"+medianlabels[2]+"}}";
	
	//row 3: highs
	let highs = [Math.max(...nonimagedvph),Math.max(...imagedvph),Math.max(...total)];
	let highlabels = [];
	for (var i=0; i<3; i++){
		var arr = [nonimagedarr,imagedarr,summary][i];
		var indexarr = [nonimagedvph,imagedvph,total][i];
		highlabels.push(arr[indexarr.indexOf(highs[i])]["article"]);
	}
	highs = highs.map(a => cleanNumber(a));
	tables += "\n{{DYK stats monthly summary table row|High|"+highs[0]+"|"+highlabels[0]+"|"+highs[1]+"|"+highlabels[1]+"|"+highs[2]+"|"+highlabels[2]+"}}";

	tables += "\n}}"
	console.log("Lows:",lows);
	console.log("Medians:",medians);
	console.log("Highs:",highs);
	
	let monthyear = getmonthyear();
	//counts of hooks clearing the stats thresholds [non-imaged, imaged]
	//NOTE(review): `statscounts` is an implicit global.
	statscounts = [nonimagedvph.filter(a => a>threshold[0]).length,imagedvph.filter(a => a>threshold[1]).length]
	//row transcluded onto the all-hooks monthly summary statistics page
	let summaryrow = "==To total table==\n<noinclude>This row is transcluded to [[Wikipedia:Did you know/Statistics/Monthly summary statistics]].\n{|class=\"wikitable\"</noinclude>\n|-";
	let totalcount = [total.length,statscounts[0]+statscounts[1]];
	summaryrow += "\n| [["+rapagename+"|"+monthyear+"]]";
	summaryrow += "\n| "+totalcount[0];
	summaryrow += "\n| "+totalcount[1];
	summaryrow += "\n| "+cleanNumber(totalcount[1]/totalcount[0]*100)+"%";
	summaryrow += "\n| "+lows[2];
	summaryrow += "\n| "+lowlabels[2];
	summaryrow += "\n| "+medians[2];
	summaryrow += "\n| "+medianlabels[2];
	summaryrow += "\n| "+highs[2];
	summaryrow += "\n| "+highlabels[2];
	summaryrow += "\n<noinclude>|}</noinclude>";
	
	//row transcluded onto the imaged-hooks page
	let imagedsummaryrow = "==To imaged table==\n<noinclude>This row is transcluded to [[Wikipedia:Did you know/Statistics/Monthly summary statistics/Imaged]].\n{|class=\"wikitable\"</noinclude>\n|-";
	let imagedcount = [imagedvph.length,statscounts[1]];
	imagedsummaryrow += "\n| [["+rapagename+"|"+monthyear+"]]";
	imagedsummaryrow += "\n| "+imagedcount[0];
	imagedsummaryrow += "\n| "+imagedcount[1];
	imagedsummaryrow += "\n| "+cleanNumber(imagedcount[1]/imagedcount[0]*100)+"%";
	imagedsummaryrow += "\n| "+lows[1];
	imagedsummaryrow += "\n| "+lowlabels[1];
	imagedsummaryrow += "\n| "+medians[1];
	imagedsummaryrow += "\n| "+medianlabels[1];
	imagedsummaryrow += "\n| "+highs[1];
	imagedsummaryrow += "\n| "+highlabels[1];
	imagedsummaryrow += "\n<noinclude>|}</noinclude>";
	
	//row transcluded onto the non-imaged-hooks page
	let nonimagedsummaryrow = "==To non-imaged table==\n<noinclude>This row is transcluded to [[Wikipedia:Did you know/Statistics/Monthly summary statistics/Non-imaged]].\n{|class=\"wikitable\"</noinclude>\n|-";
	let nonimagedcount = [nonimagedvph.length,statscounts[0]];
	nonimagedsummaryrow += "\n| [["+rapagename+"|"+monthyear+"]]";
	nonimagedsummaryrow += "\n| "+nonimagedcount[0];
	nonimagedsummaryrow += "\n| "+nonimagedcount[1];
	nonimagedsummaryrow += "\n| "+cleanNumber(nonimagedcount[1]/nonimagedcount[0]*100)+"%";
	nonimagedsummaryrow += "\n| "+lows[0];
	nonimagedsummaryrow += "\n| "+lowlabels[0];
	nonimagedsummaryrow += "\n| "+medians[0];
	nonimagedsummaryrow += "\n| "+medianlabels[0];
	nonimagedsummaryrow += "\n| "+highs[0];
	nonimagedsummaryrow += "\n| "+highlabels[0];
	nonimagedsummaryrow += "\n<noinclude>|}</noinclude>";
	
	//join the four sections into one document
	return [tables,summaryrow,imagedsummaryrow,nonimagedsummaryrow].join("\n");
}

function processData(papp,app,multis){
    // Renders one finished set (`papp`) into stats-table wikitext rows, pushed
    // onto the module-level `output` ([wikitext, vph] pairs) and `summary`.
    //   papp   - the set to render: [timestamp, image, ...parsedHooks], where a
    //            parsed hook is [article, baseline1, baseline2, dayViews, hook]
    //   app    - the set parsed after papp; its timestamp marks papp's start
    //   multis - groups of bolded articles belonging to papp's multi-hooks
    if (papp[2].length != 5){ //first hook record must have exactly five fields (see getData)
        return null;
    }
    //hours papp spent on the main page. NOTE(review): `hours` is an implicit global.
    hours = subtract(app[0],papp[0]);
    let basefile = "[[File:{image}|100x100px]]"
    let baseviewsurl = "https://pageviews.toolforge.org/?project=en.wikipedia.org&platform=all-access&agent=user&redirects&start={startdate}&end={enddate}&pages={page_}"
	//row template; placeholders are substituted below ({|b} becomes "|b=…" when flagged)
	let base = "{{DYK stats table row|{page}|{file}|{date}|{views}|{vph}|{hook}{|b}}}";
	//let basemultibegin = "{{DYK stats table multi begin|{page}|{num}|{file}|{date}|{views|{vph}|{hook}}}";
	//let basemulti = "{{DYK stats table multi|{page}|{date}|{views}|{vph}}}";
	//let basetotal = "{{DYK stats table multi total|{views}|{vph}}}";
    let hookNames = []; //NOTE(review): collected but not read below
    papp.forEach((item) => hookNames.push(item[0]));
    for (let i=2; i<papp.length; i++) { //index 0 is the timestamp, 1 the image; hooks start at 2 (i==2 is the imaged/lead hook)
        
        let str = base;
        let strfile = basefile;
        let strviewsurl = baseviewsurl;
        //find which multi group (if any) this article belongs to
        let a = -1;
        for (let j=0; j<multis.length; j++){
            if (multis[j].includes(papp[i][0])){
                a = j;
                break;
            }
        }
        if (a != -1) { //multi hook handling: emit a multi begin/rows/total block
            //NOTE(review): `rs`, `vphs`, `vs`, and `strs` below are implicit globals.
            rs = []; //calculateViews results for every article in the multi
            for (let j=i; j<i+multis[a].length; j++){
                rs.push(calculateViews(papp[j],hours,i==2));
            }
			vphs = []; //views-per-hour per article
			rs.forEach((item) => vphs.push(item[1]));
			vs = []; //net views per article
			rs.forEach((item) => vs.push(item[0]));
            const reducer = (previousValue, currentValue) => previousValue + currentValue;
			
            let sumh = vphs.reduce(reducer); //combined views-per-hour
			let sum = vs.reduce(reducer); //combined net views
			let res = "";
			console.log(multis[a],vphs,(Math.round(sumh*10)/10).toFixed(1),(sumh >= threshold[+(i==2)])?cowboy:"");
			summary.push({article: multis[a].map(n => "[["+n+"]]").join(", "), vph: sumh, imaged: i==2})
			//one line per article, bracketed by "multi begin" and "multi total" templates
			strs = Array.apply(null, Array(multis[a].length+1)).map(_ => "{{DYK stats table multi|");
			strs[0] = "{{DYK stats table multi begin|";
			strs[strs.length-1] = "{{DYK stats table multi total|";
			strs[0] += papp[i][0]+"|";
			if (i==2){ //only the imaged (lead) hook carries the set's image
				strs[0] += multis[a].length+"|"+papp[1]+"|";
			} else {
				strs[0] += multis[a].length+"||";
			}
			let date = convertToDate(papp[0]);
			let datestring = `${date.getUTCFullYear()}-${("0" + (date.getUTCMonth()+1)).slice(-2)}-${("0" + date.getUTCDate()).slice(-2)}`;
			strs[0] += datestring + "|";
			strs[0] += numberWithCommas(rs[0][0]) + "|";
			strs[0] += cleanNumber(rs[0][1]) + ((rs[0][2])?("|b="+cleanNumber(rs[0][3])):"") + "|"; //append baseline when flagged
			strs[0] += papp[i][papp[i].length-1] +  "}}"; //hook text is the record's last field
			for (let j=i+1; j<i+multis[a].length; j++){ //remaining articles of the multi
				strs[j-i] += papp[j][0]+"|";
				strs[j-i] += datestring+"|";
				strs[j-i] += numberWithCommas(rs[j-i][0]) + "|";	
				strs[j-i] += cleanNumber(rs[j-i][1]) + ((rs[j-i][2])?("|b="+cleanNumber(rs[j-i][3])):"") + "}}";					
			}
			strs[strs.length-1] += numberWithCommas(sum)+"|"+cleanNumber(sumh)+"}}";
			output.push([strs.join("\n"),sumh]);
            i += multis[a].length-1; //skip the articles we just consumed
        } else { //if hook is single, like me
            let r = calculateViews(papp[i],hours,i==2);
			console.log(papp[i][0]+":",(Math.round(r[1]*10)/10).toFixed(1),(r[1] >= threshold[+(i==2)])?cowboy:"")
			summary.push({article: "[["+papp[i][0]+"]]", vph: r[1], imaged: i==2})
			str = str.replace("{page}",papp[i][0]); //the name of the page, hyperlinked, e.g. "[[Jimmy Carter]]"
			if (i==2) {
				str = str.replace("{file}",papp[1]); //the file to be used on the stats page
			} else {
				str = str.replace("{file}","");
			}
			let date = convertToDate(papp[0]);
			let datestring = date.getUTCFullYear() + "-" + ("0" + (date.getUTCMonth()+1)).slice(-2) + "-" + ("0" + date.getUTCDate()).slice(-2)
			let viewstring = "";
			str = str.replace("{date}",datestring);
			str = str.replace("{views}",numberWithCommas(r[0]));
			str = str.replace("{vph}",cleanNumber(r[1])); //the hook views per hour
			str = str.replace("{hook}",papp[i][papp[i].length-1]); // text of the hook
			if (r[2]){ //flagged rows carry their baseline as |b=
				str = str.replace("{|b}","|b="+cleanNumber(r[3]));
			} else {
				str = str.replace("{|b}","");
			}
			output.push([str,r[1]]);
        }
        
    }
}
function convertToDate(str){
    // Parse a DYK archive timestamp of the form "HH:MM, D Month YYYY" into a
    // Date (local time). `dict` maps month names to zero-based index strings.
    const [timePart, dayPart] = str.split(", ");
    const [hour, minute] = timePart.split(":");
    const [day, monthName, year] = dayPart.split(" ");
    return new Date(year, dict[monthName], day, hour, minute);
}
function subtract(stra,strb){
	// Hours elapsed from timestamp `stra` to timestamp `strb`.
	// When the two timestamps fall on different calendar days, `strb` is first
	// truncated to midnight (NOTE(review): presumably to credit a set only up
	// to the end of its final full day — confirm against callers).
	const start = convertToDate(stra);
	const end = convertToDate(strb);
	if (start.getDate() !== end.getDate()) {
		end.setHours(0);
		end.setMinutes(0);
	}
	return (end.getTime() - start.getTime()) / 3600000;
}

function indexOfMax(arr) {
    // Return the index of the largest element, or -1 for an empty array.
    // First occurrence wins on ties.
    if (arr.length === 0) {
        return -1;
    }
    // Bug fix: the running maximum was seeded with arr[1], which made a
    // maximal arr[0] unreachable (e.g. [5, 1, 2] reported index 2).
    var max = arr[0];
    var maxIndex = 0;
    for (var i = 1; i < arr.length; i++) {
        if (arr[i] > max) {
            maxIndex = i;
            max = arr[i];
        }
    }
    return maxIndex;
}
function calculateViews(r,h,imaged){
	// Compute hook views for one parsed article record.
	//   r      - [article, baseline1, baseline2, dayViews, ...]
	//   h      - hours the hook spent on the main page
	//   imaged - whether this is the imaged (lead) hook
	// Returns [netViews, viewsPerHour, flagged, baseline]; `flagged` marks
	// entries whose baseline correction looks suspicious (below-threshold vph
	// despite above-threshold raw rate, or a baseline deduction over 2048).
	const limit = threshold[+imaged];
	const baseline = (r[1] + r[2]) / 2;
	const net = Math.round(r[3] - baseline);
	const perHour = net / h;
	const flagged = (perHour < limit && r[3] / h > limit) || r[3] - net > 2048;
	return [net, perHour, flagged, baseline];
}
function numberWithCommas(x) {
    // Insert thousands separators into the integer part of a number/string.
    // (Lookahead regex; upstream callers feed it toFixed(1) output, whose
    // single fractional digit is left untouched.)
    const text = x.toString();
    return text.replace(/\B(?=(\d{3})+(?!\d))/g, ",");
}

async function writeToStats(res){
	// Write the finished stats table to the monthly pageview-leaders page and
	// its computed summary to the corresponding /Summary subpage.
	//   res - the rendered {{DYK stats table}} wikitext
	let page;
	let monthyear = rapagename.split("/");
	if (monthyear.length>1){
		page = "Wikipedia:Did you know/Statistics/Monthly DYK pageview leaders/"+monthyear[1]+"/"+monthyear[2];
	} else {
		page = "Wikipedia:Did you know/Statistics/Monthly DYK pageview leaders";
	}
	// Standard page header: tabs, navigation, and a transcluded summary excerpt.
	let topstring = "{{Wikipedia:Did you know/Statistics/Tabs|4}}\n{{Wikipedia:Did you know/Statistics/Monthly DYK pageview leaders/Navigation}}\n{{Excerpt|"+page+"/Summary|To main summary page|hat=no}}\n{{clear}}\n==Table==\n";
	res = topstring + res;
	let summarytext = "feedin' the bangtail ([[User:Theleekycauldron/DYKViews.js|jockey]])";

	// The original declared `var params` twice and fired both edits as
	// floating promises; use distinct objects and await each edit in turn so
	// failures surface and the edits happen in a predictable order.
	const tableParams = {
			action: 'edit',
			title: page,
			summary: summarytext,
			text: res,
			format: 'json'
		};
	const summaryParams = {
			action: 'edit',
			title: page+"/Summary",
			summary: summarytext,
			text: summaryTable(),
			format: 'json'
		};

	try {
		console.log( await api.postWithToken( 'csrf', tableParams ) );
		console.log( await api.postWithToken( 'csrf', summaryParams ) );
	} catch (err) {
		doSomethingInCaseOfError( err );
	}
}
function getmonthyear(){
	// Derive a human-readable "Month Year" label for the page being processed.
	// For the live Recent_additions page the date comes from the most recently
	// processed set (papp); for an archive subpage it comes from the title.
	if (rapagename == "Wikipedia:Recent_additions"){
		if (papp.length == 0){
			throw new Error("Howdy there, pardner! Looks like you've tried to feed the horse empty-handed, and that just don't make jack. Check back tomorruh, maybe we can higgle over something from the hoosegow. Now, pull in your horns and light a shuck, would'ya?")
		}
		const date = convertToDate(papp[0]);
		return months[date.getUTCMonth()] + " " + date.getUTCFullYear();
	}
	const parts = rapagename.split("/");
	return parts[2] + " " + parts[1];
}
// UI entry point: on any WP:Recent additions page, prepend a "Get views!"
// button that runs the full pipeline in edit mode (edit=true writes the
// resulting stats pages on-wiki).
if (mw.config.get('wgPageName').includes("Wikipedia:Recent_additions")){
    $('#bodyContent').prepend('<button onclick="getDYKViews(mw.config.get(\'wgPageName\'),false,true)">Get views!</button>');
}