diff --git a/.travis.yml b/.travis.yml
index d82702a..305a0d9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,12 +1,13 @@
branches:
- only:
+ only:
- master
language: python
python:
- - "3.5"
- "3.6"
- "3.7"
+ - "3.8"
+ - "3.9"
cache:
- pip
@@ -23,6 +24,7 @@ before_script:
- pip3 install codecov
- pip3 install coveralls
- pip3 install codacy-coverage
+ - pip3 install -r requirements.txt
- sudo apt-get update
# command to run tests
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0fb3fc8..4e1915b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,7 @@
# CHANGELOG
-This is a manually generated log to track changes to the repository for each release.
-Each section should include general headers such as **Implemented enhancements**
+This is a manually generated log to track changes to the repository for each release.
+Each section should include general headers such as **Implemented enhancements**
and **Merged pull requests**. Critical items to know are:
- renamed commands
@@ -12,6 +12,7 @@ and **Merged pull requests**. Critical items to know are:
Referenced versions in headers are tagged on Github, in parentheses are for pypi.
## [vxx](https://github.com/urlstechie/urlschecker-python/tree/master) (master)
+ - accelerate code using asyncio and aiohttp (0.1.0)
- updating "whitelist" arguments to exclude (0.0.22)
- adding support for dotfiles for a file type (0.0.21)
- final regexp needs to again parse away { or } (0.0.20)
diff --git a/README.md b/README.md
index c04731a..884f995 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@

-[](https://travis-ci.com/urlstechie/urlchecker-python) [](https://urlchecker-python.readthedocs.io/en/latest/?badge=latest) [](https://codecov.io/gh/urlstechie/urlchecker-python) [](https://www.python.org/doc/versions/) [](https://www.codefactor.io/repository/github/urlstechie/urlchecker-python)  [](https://pepy.tech/project/urlchecker) [](https://github.com/urlstechie/urlchecker-python/blob/master/LICENSE)
+[](https://travis-ci.com/urlstechie/urlchecker-python) [](https://urlchecker-python.readthedocs.io/en/latest/?badge=latest) [](https://codecov.io/gh/urlstechie/urlchecker-python) [](https://www.python.org/doc/versions/) [](https://www.codefactor.io/repository/github/urlstechie/urlchecker-python) [](https://badge.fury.io/py/urlchecker) [](https://pepy.tech/project/urlchecker) [](https://github.com/urlstechie/urlchecker-python/blob/master/LICENSE)
# urlchecker-python
@@ -10,6 +10,11 @@ and then test for and report broken links. If you are interesting in using
this as a GitHub action, see [urlchecker-action](https://github.com/urlstechie/urlchecker-action). There are also container
bases available on [quay.io/urlstechie/urlchecker](https://quay.io/repository/urlstechie/urlchecker?tab=tags).
+## Module Dependencies
+**Versions <= 0.0.22** are built around the [Requests](https://requests.readthedocs.io/en/master/) library whereas
+**versions >= 0.0.23** are built around the [asyncio](https://docs.python.org/3/library/asyncio.html) and the [AIOHTTP](https://docs.aiohttp.org/en/stable/) libraries.
+
+
## Module Documentation
A detailed documentation of the code is available under [urlchecker-python.readthedocs.io](https://urlchecker-python.readthedocs.io/en/latest/)
@@ -88,7 +93,7 @@ optional arguments:
--save SAVE Path to a csv file to save results to.
--retry-count RETRY_COUNT
retry count upon failure (defaults to 2, one retry).
- --timeout TIMEOUT timeout (seconds) to provide to the requests library
+ --timeout TIMEOUT timeout (minutes) to provide to the aiohttp library
(defaults to 5)
```
@@ -121,7 +126,7 @@ $ urlchecker check .
save: None
timeout: 5
- /tmp/urlchecker-action/README.md
+ /tmp/urlchecker-action/README.md
--------------------------------
https://github.com/urlstechie/urlchecker-action/blob/master/LICENSE
https://github.com/r-hub/docs/blob/bc1eac71206f7cb96ca00148dcf3b46c6d25ada4/.github/workflows/pr.yml
@@ -152,7 +157,7 @@ https://github.com/SuperKogito/Voice-based-gender-recognition/issues
https://github.com/buildtesters/buildtest/blob/v0.9.1/.github/workflows/urlchecker.yml
https://github.com/berlin-hack-and-tell/berlinhackandtell.rocks/blob/master/.github/workflows/urlchecker-pr-label.yml
- /tmp/urlchecker-action/examples/README.md
+ /tmp/urlchecker-action/examples/README.md
-----------------------------------------
https://github.com/urlstechie/urlchecker-action/releases
https://github.com/urlstechie/urlchecker-action/issues
@@ -184,7 +189,7 @@ $ urlchecker check --exclude-pattern SuperKogito .
save: None
timeout: 5
- /tmp/urlchecker-action/README.md
+ /tmp/urlchecker-action/README.md
--------------------------------
https://github.com/urlstechie/urlchecker-action/blob/master/LICENSE
https://github.com/urlstechie/urlchecker-action/issues
@@ -212,7 +217,7 @@ https://github.com/berlin-hack-and-tell/berlinhackandtell.rocks/actions?query=wo
https://github.com/USRSE/usrse.github.io
https://github.com/rseng/awesome-rseng/blob/5f5cb78f8392cf10aec2f3952b305ae9611029c2/.github/workflows/urlchecker.yml
- /tmp/urlchecker-action/examples/README.md
+ /tmp/urlchecker-action/examples/README.md
-----------------------------------------
https://help.github.com/en/actions/reference/events-that-trigger-workflows
https://github.com/urlstechie/urlchecker-action/issues
@@ -386,32 +391,32 @@ You can look at `checker.checks`, which is a dictionary of result objects,
organized by the filename:
```python
-for file_name, result in checker.checks.items():
- print()
- print(result)
- print("Total Results: %s " % result.count)
- print("Total Failed: %s" % len(result.failed))
- print("Total Passed: %s" % len(result.passed))
+for file_name, result in checker.checks.items():
+ print()
+ print(result)
+ print("Total Results: %s " % result.count)
+ print("Total Failed: %s" % len(result.failed))
+ print("Total Passed: %s" % len(result.passed))
...
UrlCheck:/home/vanessa/Desktop/Code/urlstechie/urlchecker-python/tests/test_files/sample_test_file.md
-Total Results: 26
+Total Results: 26
Total Failed: 6
Total Passed: 20
UrlCheck:/home/vanessa/Desktop/Code/urlstechie/urlchecker-python/.pytest_cache/README.md
-Total Results: 1
+Total Results: 1
Total Failed: 0
Total Passed: 1
UrlCheck:/home/vanessa/Desktop/Code/urlstechie/urlchecker-python/.eggs/pytest_runner-5.2-py3.7.egg/ptr.py
-Total Results: 0
+Total Results: 0
Total Failed: 0
Total Passed: 0
UrlCheck:/home/vanessa/Desktop/Code/urlstechie/urlchecker-python/docs/source/conf.py
-Total Results: 3
+Total Results: 3
Total Failed: 0
Total Passed: 3
```
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..bb7da88
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+aiohttp==3.7.3
diff --git a/tests/_local_test_config.conf b/tests/_local_test_config.conf
index 06cab45..59b5391 100644
--- a/tests/_local_test_config.conf
+++ b/tests/_local_test_config.conf
@@ -1,5 +1,5 @@
[DEFAULT]
git_path_test_value = https://github.com/urlstechie/urlchecker-test-repo
-file_types_test_values = .md,.py,.c,.txt
+file_types_test_values = .md,.c,.txt
exclude_test_urls = https://github.com/SuperKogito/URLs-checker/issues/2,https://github.com/SuperKogito/URLs-checker/issues/3
exclude_test_patterns = https://github.com/SuperKogito/Voice-based-gender-recognition/issues,https://img.shields.io/
diff --git a/tests/test_client_check.py b/tests/test_client_check.py
index ba19e73..1f7a237 100644
--- a/tests/test_client_check.py
+++ b/tests/test_client_check.py
@@ -1,8 +1,28 @@
import os
import pytest
-import subprocess
+import argparse
import tempfile
+import subprocess
import configparser
+from urlchecker.client import check
+
+
+def test_client_general():
+    # execute scripts
+ pipe = subprocess.run(
+ ["urlchecker", "-h"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+ assert pipe.stderr.decode("utf-8") == ""
+
+ pipe = subprocess.run(
+ ["urlchecker", "--help"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+ assert pipe.stderr.decode("utf-8") == ""
+
+ pipe = subprocess.run(
+ ["urlchecker", "--version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+ assert pipe.stderr.decode("utf-8") == ""
@pytest.mark.parametrize("config_fname", ["./tests/_local_test_config.conf"])
@@ -10,8 +30,8 @@
@pytest.mark.parametrize("print_all", [False, True])
@pytest.mark.parametrize("force_pass", [False, True])
@pytest.mark.parametrize("rcount", [1, 3])
-@pytest.mark.parametrize("timeout", [3, 5])
-def test_client_general(config_fname, cleanup, print_all, force_pass, rcount, timeout):
+@pytest.mark.parametrize("timeout", [5, 7])
+def test_client_check(config_fname, cleanup, print_all, force_pass, rcount, timeout):
# init config parser
config = configparser.ConfigParser()
@@ -101,3 +121,39 @@ def test_client_save(save):
if save:
if not os.path.exists(output_csv.name):
raise AssertionError
+
+
+@pytest.mark.parametrize("config_fname", ["./tests/_local_test_config.conf"])
+def test_client_check_main(config_fname):
+
+ # init config parser
+ config = configparser.ConfigParser()
+ config.read(config_fname)
+
+ # init env variables
+ path = config["DEFAULT"]["git_path_test_value"]
+ file_types = config["DEFAULT"]["file_types_test_values"]
+ exclude_urls = config["DEFAULT"]["exclude_test_urls"]
+ exclude_patterns = config["DEFAULT"]["exclude_test_patterns"]
+
+ # init args
+ args = argparse.Namespace()
+ args.path = path
+ args.branch = "master"
+ args.subfolder = "test_files"
+ args.cleanup = True
+ args.force_pass = True
+ args.no_print = True
+ args.file_types = file_types
+ args.files = ""
+ args.exclude_urls = ""
+ args.exclude_patterns = ""
+ args.exclude_files = ""
+ args.save = ""
+ args.retry_count = 1
+ args.timeout = 5
+
+    # execute script
+ with pytest.raises(SystemExit) as e:
+ check.main(args=args, extra=[])
+ assert e.value.code == 0
diff --git a/tests/test_core_check.py b/tests/test_core_check.py
index 4d4426d..7dd58e4 100644
--- a/tests/test_core_check.py
+++ b/tests/test_core_check.py
@@ -12,7 +12,7 @@
"file_paths",
[
["tests/test_files/sample_test_file.md"],
- ["tests/test_files/sample_test_file.py"],
+ ["tests/test_files/sample_test_file.c"],
["tests/test_files/sample_test_file.rst"],
],
)
diff --git a/tests/test_core_fileproc.py b/tests/test_core_fileproc.py
index 4cde137..eb4ce7b 100644
--- a/tests/test_core_fileproc.py
+++ b/tests/test_core_fileproc.py
@@ -31,9 +31,9 @@ def test_check_file_type(file_path, file_types):
@pytest.mark.parametrize(
"file_path",
- ["tests/test_files/sample_test_file.md", "tests/test_files/sample_test_file.py"],
+ ["tests/test_files/sample_test_file.txt", "tests/test_files/sample_test_file.py"],
)
-@pytest.mark.parametrize("file_types", [[".md", ".py"]])
+@pytest.mark.parametrize("file_types", [[".txt", ".py"]])
def test_check_file_type(file_path, file_types):
"""
test check file types
@@ -53,18 +53,18 @@ def test_check_file_type(file_path, file_types):
["tests/test_files/sample_test_file.md", "tests/test_files/sample_test_file.py"],
)
@pytest.mark.parametrize(
- "white_list_patterns", [["[.py]"], ["[.md]"], ["tests/test_file"]]
+ "exclude_patterns", [["[.py]"], ["[.md]"], ["tests/test_file"]]
)
-def test_include_files(file_path, white_list_patterns):
+def test_include_files(file_path, exclude_patterns):
"""
test if a file should be included based on patterns (using extension for test)
"""
_, extension = os.path.splitext(file_path)
expected = not extension in file_path
- result = include_file(file_path, white_list_patterns)
+ result = include_file(file_path, exclude_patterns)
# No files should be included for a global path pattern
- if "tests/test_file" in white_list_patterns:
+ if "tests/test_file" in exclude_patterns:
if result:
raise AssertionError
diff --git a/tests/test_core_urlproc.py b/tests/test_core_urlproc.py
index cfcd687..484e148 100644
--- a/tests/test_core_urlproc.py
+++ b/tests/test_core_urlproc.py
@@ -61,10 +61,10 @@ def test_get_user_agent():
def test_check_response_status_code():
class failedResponse:
- status_code = 500
+ status = 500
class successResponse:
- status_code = 200
+ status = 200
# Any failure returns True (indicating a retry is needed)
assert not check_response_status_code(
diff --git a/tests/test_files/.dotfile b/tests/test_files/.dotfile
new file mode 100644
index 0000000..d5cde42
--- /dev/null
+++ b/tests/test_files/.dotfile
@@ -0,0 +1,4 @@
+https://github.com/urlstechie/urlchecker-action
+https://github.com/urlstechie/urlchecker-python
+https://github.com/urlstechie/urlstechie.github.io
+https://urlstechie.github.io/
diff --git a/tests/test_files/sample_test_file.c b/tests/test_files/sample_test_file.c
new file mode 100644
index 0000000..756eed2
--- /dev/null
+++ b/tests/test_files/sample_test_file.c
@@ -0,0 +1,10 @@
+# This is a test file
+include
+
+int main() {
+ printf("https://www.google.com/");
+ printf("https://www.youtube.com/");
+ printf("https://stackoverflow.com/");
+ printf("https://github.com/");
+ return 0;
+}
diff --git a/tests/test_files/sample_test_file.md b/tests/test_files/sample_test_file.md
index 3c5b315..04b0ff6 100644
--- a/tests/test_files/sample_test_file.md
+++ b/tests/test_files/sample_test_file.md
@@ -2,14 +2,14 @@
The following is a list of test urls to extract.
- [test url 1](https://www.google.com/)
- [test url 2](https://github.com/SuperKogito)
- - [test url 3](https://github.com/SuperKogito/URLs-checker)
+ - [test url 3](https://github.com/vsoch)
- [test url 4](https://github.com/SuperKogito/URLs-checker/blob/master/README.md)
- [test url 5](https://github.com/SuperKogito/URLs-checker/issues)
- - [test url 6](https://github.com/SuperKogito/URLs-checker/issues/4)
+ - [test url 6](https://travis-ci.com/github/urlstechie)
- [test url 7](https://github.com/SuperKogito/spafe/)
-- [test url 8](https://github.com/SuperKogito/spafe/issues)
-- [test url 9](https://github.com/SuperKogito/spafe/issues/1)
+- [test url 8](https://codecov.io/gh/urlstechie)
+- [test url 9](https://github.com/urlstechie/urlchecker-action)
- [test url 10](https://github.com/SuperKogito/Voice-based-gender-recognition)
- [test url 11](https://github.com/SuperKogito/Voice-based-gender-recognition/issues)
diff --git a/tests/test_files/sample_test_file.py b/tests/test_files/sample_test_file.py
index 23f98ad..37236b3 100644
--- a/tests/test_files/sample_test_file.py
+++ b/tests/test_files/sample_test_file.py
@@ -8,6 +8,7 @@
print("This is a test file with some URLs")
url1 = "https://www.google.com/"
url2 = "https://github.com/SuperKogito"
- url3 = "https://github.com/SuperKogito/URLs-checker/README.md"
- url3 = {"url": "https://github.com/SuperKogito/URLs-checker/README.md"}
+ url3 = "https://github.com/vsoch"
+ url4 = "https://github.com/SuperKogito/URLs-checker/README.md"
+ url5 = {"url": "https://github.com/SuperKogito/URLs-checker/README.md"}
print("Done.")
diff --git a/tests/test_files/sample_test_file.txt b/tests/test_files/sample_test_file.txt
new file mode 100644
index 0000000..de76136
--- /dev/null
+++ b/tests/test_files/sample_test_file.txt
@@ -0,0 +1,8 @@
+# This is a test file
+https://github.com/urlstechie
+https://github.com/urlstechie/urlchecker-python
+https://urlstechie.github.io/
+https://superkogito.github.io/blog/urlstechie.html
+https://twitter.com
+https://anaconda.org/conda-forge/urlchecker
+https://urlchecker-python.readthedocs.io/en/latest/
diff --git a/urlchecker/client/__init__.py b/urlchecker/client/__init__.py
index 2ae663e..e7173cf 100755
--- a/urlchecker/client/__init__.py
+++ b/urlchecker/client/__init__.py
@@ -136,7 +136,7 @@ def get_parser():
check.add_argument(
"--timeout",
- help="timeout (seconds) to provide to the requests library (defaults to 5)",
+ help="timeout (minutes) to provide to the aiohttp library (defaults to 5)",
type=int,
default=5,
)
diff --git a/urlchecker/client/check.py b/urlchecker/client/check.py
index 2a1ea50..5b357e7 100644
--- a/urlchecker/client/check.py
+++ b/urlchecker/client/check.py
@@ -48,7 +48,11 @@ def main(args, extra):
sys.exit("Error %s does not exist." % path)
# Parse file types, and excluded urls and files (includes absolute and patterns)
- file_types = args.file_types.split(",")
+ file_types = []
+ if "," in args.file_types:
+ file_types = [ft for ft in args.file_types.split(",") if len(ft) > 1]
+ else:
+ file_types.append(args.file_types)
exclude_urls = remove_empty(args.exclude_urls.split(","))
exclude_patterns = remove_empty(args.exclude_patterns.split(","))
exclude_files = remove_empty(args.exclude_files.split(","))
diff --git a/urlchecker/core/urlproc.py b/urlchecker/core/urlproc.py
index beb2dae..777fad8 100644
--- a/urlchecker/core/urlproc.py
+++ b/urlchecker/core/urlproc.py
@@ -2,14 +2,17 @@
Copyright (c) 2020-2021 Ayoub Malek and Vanessa Sochat
-This source code is licensed under the terms of the MIT license.
+This source code is licensed under the terms of the MIT license.
For a copy, see .
"""
import os
+import sys
import time
import random
+import asyncio
+import aiohttp
import requests
from urlchecker.core import fileproc
from urlchecker.core.exclude import excluded
@@ -34,7 +37,7 @@ def check_response_status_code(url, response):
return True
# Case 2: success! Retry is not needed.
- if response.status_code == 200:
+ if response.status == 200:
print_success(url)
return False
@@ -146,13 +149,100 @@ def extract_urls(self):
# collect all links from file (unique=True is set)
self.urls = fileproc.collect_links_from_file(self.file_name)
- def check_urls(self, urls=None, retry_count=1, timeout=5):
+ async def aysnc_url_check(self, url, retry_count, timeout, headers):
+ """
+        Asynchronous check function for one url.
+
+ Args:
+            - url (str) : the url to check.
+ - retry_count (int) : a number of retries to issue (defaults to 1, no retry).
+ - timeout (int) : a timeout in minutes for blocking operations like the connection attempt.
+ - headers (dict) : headers to use in the request.
+ """
+        # init retry flag and retry count
+ do_retry = True
+ rcount = retry_count
+
+ # we will double the time for retry each time
+ retry_seconds = 15
+
+ # With retry, increase timeout by a second
+ pause = timeout
+ saved_responses = []
+ saved_errors = []
+
+ try:
+ while rcount > 0 and do_retry:
+ response = None
+ async with aiohttp.ClientSession(headers=headers) as session:
+ try:
+ async with session.get(
+ url=url,
+ raise_for_status=False,
+ timeout=aiohttp.ClientTimeout(pause * 60),
+ ) as url_response:
+ response = url_response
+
+ # if success! Retry is not needed.
+ if response is not None:
+ do_retry = False if (response.status == 200) else True
+ if (response.status == 200) and (rcount == retry_count):
+ print_success(url)
+ else:
+ saved_responses.append(response)
+
+ except Exception as e:
+ saved_errors.append(e)
+ response = None
+
+ # decrement retrials count
+ rcount -= 1
+
+ # If we try again, pause for retry seconds and update retry seconds
+ if rcount > 0 and do_retry:
+                    # pause before retrying; the wait doubles on each retry
+ await asyncio.sleep(retry_seconds)
+ retry_seconds = retry_seconds * 2
+ pause += 1
+
+ if len(saved_errors) > 0 or len(saved_responses) > 0:
+ print_failure(url)
+ # print errors
+ for error_msg in saved_errors:
+ print("\x1b[33m" + "> " + str(error_msg) + "\x1b[0m")
+
+                # print previous url check responses
+ for response_msg in saved_responses[1:]:
+ check_response_status_code(url, response_msg)
+
+ # When we break from while, we record final response
+ self.record_response(url, response)
+
+ except Exception as e:
+ print(e)
+
+ async def async_urls_check(self, urls, retry_count, timeout, headers):
+ """
+        Wrapper function for the asynchronous urls check.
+
+ Args:
+ - urls (list) : a list of urls to check.
+ - retry_count (int) : a number of retries to issue (defaults to 1, no retry).
+ - timeout (int) : a timeout in minutes for blocking operations like the connection attempt.
+ - headers (dict) : headers to use in the request.
+ """
+ ret = await asyncio.gather(
+ *[self.aysnc_url_check(url, retry_count, timeout, headers) for url in urls]
+ )
+
+ def check_urls(self, urls=None, retry_count=3, timeout=5):
"""
Check urls extracted from a certain file and print the checks results.
Args:
- - retry_count (int) : a number of retries to issue (defaults to 1, no retry).
- - timeout (int) : a timeout in seconds for blocking operations like the connection attempt.
+ - urls (list) : a list of urls to check.
+            - retry_count (int) : a number of retries to issue (defaults to 3).
+ - timeout (int) : a timeout in minutes for blocking operations like the connection attempt.
"""
urls = urls or self.urls
@@ -183,54 +273,24 @@ def check_urls(self, urls=None, retry_count=1, timeout=5):
user_agent = get_user_agent()
headers = {"User-Agent": user_agent}
- # check links
- for url in [url for url in urls if "http" in url]:
-
- # init do retrails and retrails counts
- do_retry = True
- rcount = retry_count
-
- # we will double the time for retry each time
- retry_seconds = 2
-
- # With retry, increase timeout by a second
- pause = timeout
-
- # No need to test the same URL twice
- if url in seen:
- continue
-
- seen.add(url)
- while rcount > 0 and do_retry:
- response = None
- try:
- response = requests.get(url, timeout=pause, headers=headers)
-
- except requests.exceptions.Timeout as e:
- print(e)
-
- except requests.exceptions.ConnectionError as e:
- print(e)
+        # check urls asynchronously
+ unique_urls = set([url for url in urls if "http" in url])
- except Exception as e:
- print(e)
-
- # decrement retrials count
- rcount -= 1
-
- # Break from the loop if we have success, update user
- do_retry = check_response_status_code(url, response)
-
- # If we try again, pause for retry seconds and update retry seconds
- if rcount > 0 and do_retry:
- # keep this only for debugging
- # print("Retry n° %s for %s, with timeout of %s seconds." % (retry_count - rcount, url, pause))
- time.sleep(retry_seconds)
- retry_seconds = retry_seconds * 2
- pause += 1
-
- # When we break from while, we record final response
- self.record_response(url, response)
+ # handle different py versions support
+ if (3, 7) <= sys.version_info:
+ asyncio.run(
+ self.async_urls_check(unique_urls, retry_count, timeout, headers)
+ )
+ else:
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(
+ asyncio.wait(
+ [
+ self.aysnc_url_check(url, retry_count, timeout, headers)
+ for url in unique_urls
+ ]
+ )
+ )
def record_response(self, url, response):
"""
@@ -246,7 +306,7 @@ def record_response(self, url, response):
self.failed.append(url)
# success
- elif response.status_code == 200:
+ elif response.status == 200:
self.passed.append(url)
# Any other error
diff --git a/urlchecker/logger.py b/urlchecker/logger.py
index 9fe968d..7c4db81 100644
--- a/urlchecker/logger.py
+++ b/urlchecker/logger.py
@@ -2,7 +2,7 @@
Copyright (c) 2020-2021 Ayoub Malek and Vanessa Sochat
-This source code is licensed under the terms of the MIT license.
+This source code is licensed under the terms of the MIT license.
For a copy, see .
"""
@@ -28,32 +28,3 @@ def print_success(message):
- message: the message to print in green (indicating success).
"""
print("\x1b[32m" + message + "\x1b[0m")
-
-
-def get_logger(name="urlchecker", level=logging.INFO):
- """
- Get a default logger for the urlchecker library, meaning
- that we use name "urlchecker" and use the default logging
- level INFO
-
- Parameters:
- - name: the name for the logger (defaults to urlchecker)
- - level: the logging. to set with setLevel()
-
- Returns: logging logger
- """
-
- logger = logging.getLogger(name)
- logger.setLevel(level)
-
- # Stream handler
- ch = logging.StreamHandler()
- ch.setLevel(logging.ERROR)
-
- # formatting
- formatter = logging.Formatter(
- "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
- )
- ch.setFormatter(formatter)
- logger.addHandler(ch)
- return logger
diff --git a/urlchecker/version.py b/urlchecker/version.py
index a08f8b3..3d19c71 100644
--- a/urlchecker/version.py
+++ b/urlchecker/version.py
@@ -7,7 +7,7 @@
"""
-__version__ = "0.0.22"
+__version__ = "0.1.0"
AUTHOR = "Ayoub Malek, Vanessa Sochat"
AUTHOR_EMAIL = "superkogito@gmail.com, vsochat@stanford.edu"
NAME = "urlchecker"
@@ -23,7 +23,10 @@
# Global requirements
-INSTALL_REQUIRES = (("requests", {"min_version": "2.18.4"}),)
+INSTALL_REQUIRES = (
+ ("asyncio", {"exact_version": "3.4.3"}),
+ ("aiohttp", {"exact_version": "3.7.3"}),
+)
TESTS_REQUIRES = (("pytest", {"min_version": "4.6.2"}),)