diff --git a/thesis/6-Discussion.tex b/thesis/6-Discussion.tex
index efe1b29455c4a1bd47aefed244d5c1e654226a84..6e436ceb20b2fae81ad33330bf8dd589fbc4b243 100644
--- a/thesis/6-Discussion.tex
+++ b/thesis/6-Discussion.tex
@@ -212,32 +212,29 @@ Throughout the thesis, a variety of intriguing questions arose which couldn't be
 Here, a comprehensive list of all these pointers for possible future research is provided.
 \begin{enumerate}
- \item \textbf{How have edit filters's tasks evolved over time?}: Unfortunately, no detailed historical analysis of the filters was possible, since the database table storing changes to individual filters (\emph{abuse\_filter\_history}) is not currently replicated (see section~\ref{sec:overview-data}). As mentioned in section~\ref{sec:overview-data}, a patch aiming to renew the replication of the table is currently under review~\cite{gerrit-tables-replication}. When a dump becomes available, an extensive analysis (sym) of filter creation and activation patterns, together with .. will be possible (syn).
- (Actually there is some historical stuff: e.g. temporal overview of hits, broken down by filter action... Beware however, it is the *current* filter action they were plotted with and it is very possible that the corresponding filters had a different action switched on some time ago. %TODO check whether that's actually true
- (or another visibility level, different filter pattern which would've resulted in a different manual tag)
- \item \textbf{What are the differences between how filters are governed on EN Wikipedia compared to other language versions?}: Different Wikipedia language versions each have a local community behind them. %TODO quote?
- These communities vary widely in their modes of organisation, ..., and values. It would be definitely fascinating to explore differences between filter governance (and what typed of filters are applied) between the different languages.
- \item \textbf{Are edit filters a suitable mechanism for fighting harassment?}: Online harassment has been an increasingly important topic since.. %TODO quote ExMachina paper?
- It is also a problem recognised and addressed by Wikimedia/the Wikipedian community %TODO see 2015 Harassment survey; is there a newer one?
- According to the edit filter noticeboard archives~\cite{Wikipedia:EditFilterNoticeboardHarassment} there have been some attempts to combat harassment by means of filters.
- An evaluation of the usefulness and success of the mechanism at this task would be really interesting.
- \item \textbf{When an editor (edit filter manager who is also a bot operator) will implement a bot and when a filter}
- ethnographic inquiry
- \item \textbf{Repercussions on affected editors}: What are the consequences of edit filters on editors whose edits are filtered? Frustration? Allienation? Do they understand what is going on? Or are for example edit filter warnings helpful and the editors appreciate the hints they have been given and use them to improve their collaboration?
-\begin{comment}
-%TODO where to put this?
-Users are urged to use the term "vandalism" carefully, since it tends to offend and drive people away.
-("When editors are editing in good faith, mislabeling their edits as vandalism makes them less likely to respond to corrective advice or to engage collaboratively during a disagreement,"~\cite{Wikipedia:Vandalism})
-There are also various complaints/comments by users bewildered that their edits appear on an ``abuse log''
-\end{comment}
- \item \textbf{Is it possible to study the filter patterns in a more systematic fashion? What is to be learnt from this?} For example, it comes to attention that a lot of filters target new users: ``!(""confirmed"" in user\_groups)'' is their first condition%is this really interesting?
- \item \textbf{(How) has the notion of ``vandalism'' on Wikipedia evolved over time?}: By comparing older and newer filters, or respectively updates in filter patterns we could investigate whether there is a qualitative change in the interpretation of the ``vandalism'' notion on Wikipedia.
- \item \textbf{False Positives?}: were filters shut down, bc they matched more False positives than they had real value?
- \item \textbf{What are the urgent situations in which edit filter managers are given the freedom to act as they see fit and ignore best practices of filter adoption (i.e. switch on a filter in log only mode first and announce it on the notice board so others can have a look)? Who determines they are urgent?}: I think these cases should be scrutinised extra carefully since ``urgent situations'' have historically always been an excuse for cuts in civil liberties.
+ \item \textbf{How have edit filters' tasks evolved over time?} Unfortunately, no detailed historical analysis of the filters could be carried out, since the database table storing changes to individual filters (\emph{abuse\_filter\_history}) is not currently replicated (see section~\ref{sec:overview-data}).
+ As mentioned in section~\ref{sec:overview-data}, a patch aiming to renew the replication of the table is currently under review~\cite{gerrit-tables-replication}.
+ When a dump becomes available, an extensive investigation of filters' actions, creation and activation patterns, as well as of the patterns they have targeted over time, will be possible.
+ \item \textbf{What proportion of quality control work do filters take over?} Filter hits can be systematically compared with the overall number of edits and with the reverts carried out by the other quality control mechanisms.
+ \item \textbf{Is it possible to study the filter patterns in a more systematic fashion? What can be learnt from this?} For example, it has become apparent that $1/5$ of all active filters discriminate against new users via the \verb|!("confirmed" in user_groups)| pattern.
+ Are there other tendencies of interest?
+ \item \textbf{Is there a qualitative difference between the tasks/patterns of public and hidden filters?} According to the guidelines for filter creation, general filters should be public, while filters targeting particular users should be hidden. Is there something more to be learnt from an examination of hidden filters' patterns? Do they actually conform to the guidelines? %One will have to request access to them for research purposes, sign an NDA, etc.
+ \item \textbf{How are false positives handled?} Have filters regularly been shut down because their false positives outweighed their usefulness? Are there large numbers of false positives that distort the filter hit data and thus the interpretations offered by the present work?
+ \item \textbf{To implement a bot or to implement a filter?} An ethnographic inquiry into how editors who are simultaneously edit filter managers and bot operators decide, when faced with a new problem, which of the two mechanisms to employ.
+ \item \textbf{What are the repercussions on affected editors?} An ethnographic study of the consequences of edit filters for editors whose edits are filtered. Do they experience frustration or alienation? Do they understand what is going on? Or do they, for example, perceive the filters' warnings as helpful, appreciate the hints they have been given, and use them to improve their collaboration?
+ \item \textbf{What are the differences between how filters are governed on EN Wikipedia and on other language versions?} Each Wikipedia language version has a local community behind it.
+ These communities vary, sometimes significantly, in their modes of organisation and values.
+ It would be very insightful to explore the disparities in filter governance, and in the types of filters implemented, across the different language versions.
+ \item \textbf{Are edit filters a suitable mechanism for fighting harassment?} A disturbing rise in online personal attacks and harassment has been observed in a variety of online spaces, including Wikipedia~\cite{Duggan2014}.
+ The Wikimedia Foundation sought to better understand harassment in its projects via a Harassment Survey conducted in 2015~\cite{Wikimedia:HarassmentSurvey}.
+ According to the edit filter noticeboard archives~\cite{Wikipedia:EditFilterNoticeboardHarassment}, there have been some attempts to combat harassment by means of filters.
+ The tool is also mentioned repeatedly in the timeline of Wikipedia's Community Health Initiative~\cite{Wikipedia:CommunityHealthInitiative}, which seeks to reduce harassment and disruptive behaviour on Wikipedia.
+ An evaluation of its usefulness and success at this task would be of particular interest.
+ \item \textbf{(How) has the notion of ``vandalism'' on Wikipedia evolved over time?} By comparing older and newer filters, or successive updates to filter patterns, it could be investigated whether there has been a qualitative change in the interpretation of the notion of ``vandalism'' on Wikipedia.
+ \item \textbf{What are the urgent situations in which edit filter managers are given the freedom to act as they see fit and ignore best practices of filter adoption? Who determines that a situation is urgent?} (Best practice would be to switch on a new filter in log-only mode first and announce it on the noticeboard so others can have a look.) These cases should be scrutinised extra carefully, since ``urgent situations'' have historically always been an excuse for cuts in civil liberties.
 %* is there a qualitative difference between complaints of bots and complaints of filters?
- \item \textbf{Is there a qualitative difference between the tasks/patterns of public and hidden filters?}: We know of one general guideline/rule of a thumb (cite!) according to that general filters are to be public while filters targeting particular users are hidden. Is there something more to be learnt from an actual examination of hidden filters? One will have to request access to them for research purposes, sign an NDA, etc.
- \item \textbf{Do edit filter managers specialize on particular types of filters (e.g. vandalism vs. good faith?)} \emph{abuse\_filter\_history } table is needed for this
- \item \textbf{What proportion of quality control work do filters take over?}: compare filter hits with number of all edits and reverts via other quality control mechanisms
- \item \textbf{Do edit filter managers stick to the edit filter guidelines?}: e.g. filters should't be implemented for trivial problems (such as spelling mistakes); problems with specific pages are generally better taken care of by protecting the page and problematic title by the title blacklist; general filters shouldn't be hidden
+ %\item \textbf{Do edit filter managers specialize on particular types of filters (e.g. vandalism vs. good faith)?} The \emph{abuse\_filter\_history} table is needed for this.
+ %\item \textbf{Do edit filter managers stick to the edit filter guidelines?} E.g. filters shouldn't be implemented for trivial problems (such as spelling mistakes); problems with specific pages are generally better taken care of by protecting the page, and problematic titles by the title blacklist; general filters shouldn't be hidden.
 \end{enumerate}
 %TODO further points for future study
diff --git a/thesis/references.bib b/thesis/references.bib
index c0f3b60f9dd9973b87175c0862fa40fba287d8df..55c3447056a879757524473eac60b6f1068fbe27 100644
--- a/thesis/references.bib
+++ b/thesis/references.bib
@@ -44,6 +44,15 @@
   note = {\url{https://journals.sagepub.com/doi/pdf/10.1177/2053951717726554}}
 }

+@article{Duggan2014,
+  title = {Online harassment},
+  author = {Duggan, Maeve and Rainie, Lee and Smith, Aaron and Funk, Cary and Lenhart, Amanda and Madden, Mary},
+  year = {2014},
+  month = {October},
+  publisher = {Pew Research Center},
+  note = {\url{https://www.pewinternet.org/2014/10/22/online-harassment/}}
+}
+
 @misc{Elder2016,
   title = {Inside the game of sports vandalism on {W}ikipedia},
   author = {Elder, Jeff},
@@ -489,6 +498,15 @@
   note = {\url{https://repository.upenn.edu/cgi/viewcontent.cgi?article=1490&context=cis_papers}}
 }

+@misc{Wikimedia:HarassmentSurvey,
+  key = "Wikimedia Harassment Survey",
+  author = {},
+  title = {Wikimedia Foundation: Harassment Survey},
+  year = 2015,
+  note = {Retrieved 24 July 2019 from
+  \url{https://upload.wikimedia.org/wikipedia/commons/5/52/Harassment_Survey_2015_-_Results_Report.pdf}}
+}
+
 @misc{Wikimedia:Mission,
   key = "Wikimedia Mission",
   author = {},
@@ -642,6 +660,15 @@
   \url{https://en.wikipedia.org/w/index.php?title=User:ClueBot_NG&oldid=391868393}}
 }

+@misc{Wikipedia:CommunityHealthInitiative,
+  key = "Wikipedia Community Health Initiative",
+  author = {},
+  title = {Wikipedia: Community Health Initiative},
+  year = 2019,
+  note = {Retrieved 24 July 2019 from
+  \url{https://en.wikipedia.org/w/index.php?title=Wikipedia:Community_health_initiative&oldid=905253115}}
+}
+
 @misc{Wikipedia:GoodFaith,
   key = "Wikipedia Assume Good Faith",
   author = {},
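As a side note to the future-work item on studying filter patterns more systematically (the one mentioning the !("confirmed" in user_groups) condition), a minimal sketch of how such a pattern survey could be bootstrapped is given below. It assumes a hypothetical CSV export abuse_filter.csv of the abuse_filter table with af_pattern and af_enabled columns; the column names follow the MediaWiki AbuseFilter schema, but the file, the matching heuristic and the entry point are illustrative assumptions, not the procedure actually used in the thesis.

import csv
import re

# Loose heuristic for the "unconfirmed user" condition; filter authors write it
# with varying whitespace and quoting, e.g. !("confirmed" in user_groups).
NEW_USER_CONDITION = re.compile(r"""!\s*\(\s*["']confirmed["']\s+in\s+user_groups\s*\)""")

def count_new_user_filters(path="abuse_filter.csv"):
    """Count enabled filters whose pattern singles out unconfirmed users.

    Expects a CSV export of the abuse_filter table with at least the
    af_pattern and af_enabled columns (an assumption, see above).
    """
    active, matching = 0, 0
    with open(path, newline="", encoding="utf-8") as f:
        for row in csv.DictReader(f):
            if row.get("af_enabled") != "1":  # skip disabled filters
                continue
            active += 1
            if NEW_USER_CONDITION.search(row.get("af_pattern", "")):
                matching += 1
    return matching, active

if __name__ == "__main__":
    matching, active = count_new_user_filters()
    print(f"{matching} of {active} active filters check for unconfirmed users")

The same skeleton could be extended with further regular expressions to probe for other recurring motifs in the patterns (page title checks, edit summary tests, specific slurs, and so on), which is the kind of tendency the future-work item asks about.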