From f9fcd2d2ad463948a8fe487ab18c877f35d4a98b Mon Sep 17 00:00:00 2001
From: Lyudmila Vaseva <vaseva@mi.fu-berlin.de>
Date: Thu, 25 Jul 2019 11:36:28 +0200
Subject: [PATCH] Refactor conclusion

---
 thesis/conclusion.tex   | 15 ++++++++-------
 thesis/introduction.tex |  2 +-
 thesis/references.bib   | 25 +++++++++++++++++++++++++
 3 files changed, 34 insertions(+), 8 deletions(-)

diff --git a/thesis/conclusion.tex b/thesis/conclusion.tex
index 157ca86..14cac59 100644
--- a/thesis/conclusion.tex
+++ b/thesis/conclusion.tex
@@ -15,14 +15,14 @@ It was further discussed why such an old-school rule-based technology is still a
 Additionally, interesting paths for future research were suggested.
 
 % TODO more detailed overview of results
-Summing up the most prominent results, edit filters are the first mechanism verifying incoming contributions.
+Summing up the most prominent results, edit filters, together with the page protection and title/spam blacklist mechanisms, are among the first mechanisms to verify incoming contributions.
 By acting on unpublished edits, they can disallow unconstructive ones directly and thus reduce the workload for other mechanisms.
-At the time of their introduction, the need was felt for a mechanism that swiftly prohibits obvious but difficult to remove vandalism, often caused by the same highly motivated malicious users.
+At the time of their introduction, the need was felt for a mechanism that swiftly prohibited obvious but difficult-to-remove vandalism, often caused by the same highly motivated malicious users.
 Although mass-scale page moves to nonsensical names could be taken care of by admin bots, edit filters were viewed as a neater solution since this way such edits are not published at all.
 Also, given some dissatisfaction with bots' development processes (poorly tested bots, source code not publicly available, low responsiveness of some bot operators), the opportunity for a clean start with a new tool was taken.
-Apart from targeting single highly motivated disrupting editors, edit filters take care of ``common newbie mistakes'' such as publishing text not formatted according to wikisyntax or erasing an entire page instead of properly moving the page to a different name, or suggesting it to the formal Articles for Deletion process.
+Apart from targeting single highly motivated disrupting editors, edit filters take care of ``common newbie mistakes'' such as publishing text not formatted according to wikisyntax, or erasing an entire page instead of properly moving it to a different name or nominating it for the formal Articles for Deletion process.
 By issuing warnings with helpful pointers towards possible alternative actions, edit filters allow an unintentionally disrupting editor to improve their contribution before re-submitting it.
-With feedback provided immediately at publication, the revert-first-ask-questions-later approach of other mechanisms (which frustrates and alienates good intentioned newcomers~\cite{HalGeiMorRied2013}) is inverted.
+With feedback provided immediately at publication, the revert-first, ask-questions-later approach of other mechanisms (which frustrates and alienates well-intentioned newcomers~\cite{HalGeiMorRied2013}) is inverted.
 Compared to machine learning techniques, rule-based systems such as the edit filters have the advantage of providing a higher degree of control to their operators and being easier to use and understand, which also enhances accountability.
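+A minimal sketch may make this transparency concrete (in Python rather than the actual AbuseFilter rule language; all rule names and thresholds here are invented for illustration):
+\begin{verbatim}
+import re
+
+# Each rule pairs a human-readable name with a predicate over the edit,
+# so an operator can read off exactly why an edit was flagged.
+RULES = [
+    ("page blanking",
+     lambda old, new: len(old) > 500 and len(new) < 50),
+    ("repeated characters",
+     lambda old, new: re.search(r"(.)\1{20,}", new) is not None),
+]
+
+def check_edit(old_text, new_text):
+    """Names of all rules the edit trips; an empty list means publish."""
+    return [name for name, pred in RULES if pred(old_text, new_text)]
+
+# A blanked page trips the first rule and can be warned about or
+# disallowed before it is ever published.
+print(check_edit("Some long-standing article text. " * 30, ""))
+\end{verbatim}
+Unlike a trained classifier, every decision of such a system can be traced back to a single legible rule, which is precisely the accountability advantage discussed above.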
 
 
@@ -43,17 +43,18 @@ It is, at the end, easier to maintain power and control in a centralised infrast
 However, centralisation facilitates not only the contribution of everyone towards a common goal (creating the world's biggest knowledge database) but also control.
 It is no accident that at the very introduction of the AbuseFilter extension, critical voices expressed the concern that a really powerful secret tool was being created to which very few people would have access, and that a large-scale censorship infrastructure was thereby being installed~\cite{Wikipedia:EditFilterTalkArchive1}.
 If there were multiple comparable projects, all of them would have to be censored in order to silence people.
-With Wikipedia being the first go-to source of information for a vast quantity of people all over the world today, the debate whose knowledge is included and who decides what is knowledge worth preserving is essential.
+With Wikipedia being the first go-to source of information for a vast number of people all over the world today, the debate over whose knowledge is included and who decides what knowledge is worth preserving is essential~\cite{Tkacz2014}.
 In the present moment, it is more relevant than ever:
 In March 2019, the European Parliament effectively voted for the introduction of upload filters all over the Internet~\cite{EUParliament:Copyright2019}.
+In a way, that is exactly what Wikipedia's edit filters are: they are triggered prior to publication and are able to effectively disallow the upload of undesired content.
 
 Since Wikipedia is distinctly relevant to the shaping of public opinion, it is inherently political, despite its ``neutral point of view'' policy~\cite{Wikipedia:NeutralPointOfView}.
-At the beginnings of this research, I heard the rumour that there was an edit filter on the German Wikipedia targeting gendering.
+At the beginning of this research, I heard from a former colleague that there was an edit filter on the German Wikipedia targeting gendering.
 ``To gender'' is a linguistic praxis whereby words referring to people are explicitly marked to designate more genders than the standardly used generic masculine.
 It is a political praxis aiming to make under-represented groups and their experiences visible through the conscious use of language.
 Even though gendering has not been established as a linguistic norm to date, conscious decisions for or against the praxis are political, and so are the technologies implementing these decisions.
 As it turned out, no such filter existed on the German Wikipedia
-\footnote{Although, as I have heard from women active in the German Wikipedia community, there is a strong general backlash against gendering. The community is also extremely male dominated.}.
+\footnote{Although, during one of the monthly WomenEdit meetups~\cite{Wikipedia:WomenEdit} hosted at the Wikimedia Deutschland office, women active in the German Wikipedia community related that there was a strong general backlash against gendering. The community is also extremely male-dominated.}.
 This illustrates a point though:
 Artefacts do have politics and, as Lawrence Lessig puts it, it is up to us to decide what values we embed in the systems we create~\cite{Lessig2006}. %TODO Do Artefacts have politics?
 
diff --git a/thesis/introduction.tex b/thesis/introduction.tex
index df60cee..050f4cf 100644
--- a/thesis/introduction.tex
+++ b/thesis/introduction.tex
@@ -152,7 +152,7 @@ The community was willing to disallow this kind of edits from the very start, re
 In addition to disallowing such vandalism, edit filters appear to be applied in ambiguous situations where an edit in itself is disruptive but the motivation of the editor is not clear.
 For example, deleting the entire content of a page could be malicious, but it could also be the result of a new editor not familiar with the proper procedures for deleting or moving pages.
 In such cases, the filters take an ``assume good faith'' approach and use warning messages to guide the disrupting editor towards transforming their contribution into a constructive one:
-In the page blanking example, a warning contains links to the documentation for redirects and the Articles for Deletion process, and advices the editor to revert the page to the last uncompromised version in case it has been vandalised, and to use the sandbox for test edits.
+In the page blanking example, a warning contains links to the documentation for redirects and the Articles for Deletion process~\cite{Wikipedia:ArticlesForDeletion}, and advises the editor to revert the page to the last uncompromised version in case it has been vandalised, and to use the sandbox for test edits.
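+The ``assume good faith'' flow this describes could be sketched as follows (a hypothetical Python illustration, not Wikipedia's actual implementation; the message text and function name are assumptions):
+\begin{verbatim}
+BLANKING_WARNING = (
+    "You are about to remove all content from this page. "
+    "If it was vandalised, restore the last good version; to delete "
+    "it, see Articles for Deletion; for test edits, use the sandbox."
+)
+
+def handle_blanking(already_warned):
+    """Warn on the first attempt; let a confirmed re-submission through."""
+    if not already_warned:
+        # The edit is withheld and the editor sees the pointers above.
+        return ("warn", BLANKING_WARNING)
+    # The editor re-submitted after reading the warning.
+    return ("allow", None)
+\end{verbatim}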
 There are also a smaller number of filters that take care of various maintenance tasks, above all tracking a certain bug or other behaviour for further investigation.
 Since the current work is just a first exploration into edit filters, it closes with a comprehensive list of open questions for future research.
 
diff --git a/thesis/references.bib b/thesis/references.bib
index b2a3e5e..56dec51 100644
--- a/thesis/references.bib
+++ b/thesis/references.bib
@@ -498,6 +498,13 @@
   year = {2001}
 }
 
+@book{Tkacz2014,
+  title = {Wikipedia and the Politics of Openness},
+  author = {Tkacz, Nathaniel},
+  year = {2014},
+  publisher = {University of Chicago Press}
+}
+
 @inproceedings{WestChaVenSokLee2011,
   title = {Link spamming Wikipedia for profit},
   author = {West, Andrew G and Chang, Jian and Venkatasubramanian, Krishna and Sokolsky, Oleg and Lee, Insup},
@@ -608,6 +615,15 @@
                   \url{https://en.wikipedia.org/w/index.php?title=Wikipedia:Administrator_intervention_against_vandalism&oldid=891917401}}
 }
 
+@misc{Wikipedia:ArticlesForDeletion,
+  key =          "Wikipedia Articles for Deletion",
+  author =       {},
+  title =        {Wikipedia: Articles for Deletion},
+  year =         2019,
+  note =         {Retrieved 25 July 2019 from
+                  \url{https://en.wikipedia.org/w/index.php?title=Wikipedia:Articles_for_deletion&oldid=892360111}}
+}
+
 @misc{Wikipedia:ChenFang,
   key =          "Wikipedia Administrators Noticeboard",
   author =       {},
@@ -1112,6 +1128,15 @@
                   \url{https://en.wikipedia.org/w/index.php?title=Wikipedia:Vandalism_types&oldid=876716354}}
 }
 
+@misc{Wikipedia:WomenEdit,
+  key =          "Wikipedia Women Edit",
+  author =       {},
+  title =        {Wikipedia: WomenEdit},
+  year =         2019,
+  note =         {Retrieved 25 July 2019 from
+                  \url{https://de.wikipedia.org/w/index.php?title=Wikipedia:WomenEdit&oldid=190243967}}
+}
+
 @misc{Wikipedia:XLinkBot,
   key =          "Wikipedia XLinkBot",
   author =       {},
-- 
GitLab