Commit 70e31bfe authored by borzechof99

Merge branch '5-ci-einrichten-linter' into 'master'

Implement PyLint in CI

See merge request swp-unisport/team-warumkeinrust/unisport-o-mat!27
parents aba99400 884c3620
@@ -49,6 +49,45 @@ black:
script:
- black --fast .
#SOURCE: https://pypi.org/project/pylint-gitlab/
pylint:
image: python:latest
stage: test
before_script:
- python -V
- mkdir -p public/badges public/lint
- echo undefined > public/badges/$CI_JOB_NAME.score
- pip install pylint-gitlab
- pip install -r requirements.txt
script:
- pylint --exit-zero --output-format=text $(find -type f -name "*.py" ! -path "**/.venv/**") | tee /tmp/pylint.txt
- sed -n 's/^Your code has been rated at \([-0-9.]*\)\/.*/\1/p' /tmp/pylint.txt > public/badges/$CI_JOB_NAME.score
- pylint --exit-zero --output-format=pylint_gitlab.GitlabCodeClimateReporter $(find -type f -name "*.py" ! -path "**/.venv/**") > codeclimate.json
- pylint --exit-zero --output-format=pylint_gitlab.GitlabPagesHtmlReporter $(find -type f -name "*.py" ! -path "**/.venv/**") > public/lint/index.html
after_script:
- anybadge --overwrite --label $CI_JOB_NAME --value=$(cat public/badges/$CI_JOB_NAME.score) --file=public/badges/$CI_JOB_NAME.svg 4=red 6=orange 8=yellow 10=green
- |
echo "Your score is: $(cat public/badges/$CI_JOB_NAME.score)"
artifacts:
paths:
- public
reports:
codequality: codeclimate.json
when: always
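# Note on the job above: all three pylint invocations scan the same file list
# (every *.py outside .venv). The plain-text run feeds the score badge via the
# sed extraction and anybadge, the GitlabCodeClimateReporter run produces
# codeclimate.json for the merge-request code-quality widget, and the
# GitlabPagesHtmlReporter run writes the HTML report that gets published to
# Pages. --exit-zero keeps the job green regardless of findings, so the badge
# is informational only.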
pages:
stage: deploy
image: alpine:latest
script:
- echo
artifacts:
paths:
- public
only:
refs:
- master
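# The reserved "pages" job publishes the public/ directory (badges and the
# pylint HTML report) collected as artifacts above; the bare echo is a no-op,
# present only because GitLab CI expects every job to define a script.
# Restricted to master so only the default branch updates GitLab Pages.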
test:
# variables:
# DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/$POSTGRES_DB"
......
[MASTER]
disable=line-too-long
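# line-too-long is disabled here, presumably because formatting (including
# line length) is already handled by the black job in the CI pipeline.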
"""
Implementation of a rudimentary scraping tool
for http://www.buchsys.de for SWP UniSport-O-Mat.
"""
import requests
from bs4 import BeautifulSoup
def fetch_website(url):
"""
Helper function to fetch the content of a website.
@@ -17,17 +21,21 @@ def fetch_website(url):
# pinpoint the parser only to the section containing the course names and links
return soup.find("dl", {"class": "bs_menu"}).find_all("a", href=True)
except requests.exceptions.RequestException as e:
print(e)
except requests.exceptions.RequestException as err:
print(err)
raise
def scraping(site=None) -> dict:
"""
Returns a dictionary of the form {name: link}, containing the scraped content of https://www.buchsys.de/fu-berlin/angebote/aktueller_zeitraum/index.html, unless another URL is given as an argument.
Returns a dictionary of the form {name: link},
containing the scraped content of
https://www.buchsys.de/fu-berlin/angebote/aktueller_zeitraum/index.html,
unless another URL is given as an argument.
"""
courses = {}
if site == None:
if site is None:
site = "https://www.buchsys.de/fu-berlin/angebote/aktueller_zeitraum/"
website = fetch_website(site)
......
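# Usage sketch (not part of this diff; uses only the names defined above):
# scraping() fetches the default buchsys.de course list and returns a
# {name: link} dictionary, which can be iterated directly.
if __name__ == "__main__":
    for name, link in scraping().items():
        print(f"{name}: {link}")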
"""
Testing module, yo. Just for course_scraper.py.
"""
from django.test import TestCase
from course_scraper import fetch_website, scraping
from course_scraper import scraping #, fetch_website
class ScraperTestCase(TestCase):
"""
Just a few tests, so pylint doesn't throw a fit.
Because reasons.
"""
def test_returns_dict(self):
"""
Testing return type of scraping().
"""
self.assertIsInstance(scraping(), dict)
def test_dict_not_empty(self):
"""
Testing if dict is not empty.
"""
self.assertTrue(len(scraping()) > 0)
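# These tests use django.test.TestCase, so they would typically be run through
# Django's test runner, e.g. `python manage.py test` (the exact invocation
# depends on the project layout, which is not shown in this diff).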