Mirror of https://github.com/hastagAB/Awesome-Python-Scripts.git, synced 2024-11-23 20:11:07 +00:00
url_shortener files were added (#191)
url_shortener.py skeleton was created and the script was completed. README.md description and requirements.txt were added. The project was added to the root README.md.
parent 99987da841
commit c19ba9da50
README.md
@@ -158,6 +158,7 @@ So far, the following projects have been integrated to this repo:
 |[Remove-Duplicate-Files](Remove-Duplicate-Files)|[Aayushi Varma](https://github.com/aayuv17)|
 |[PDF2text](PDF2text)|[QuangPH](https://github.com/quangph-1686a)|
 |[Image Watermarker (batch)](imageWatermarker)|[Remco Halman](https://github.com/remcohalman)|
+|[URL shortener](url_shortener)|[Sam Ebison](https://github.com/ebsa491)|

 ## How to use :
27 url_shortener/README.md Normal file
@@ -0,0 +1,27 @@
# url_shortener

Some URLs are really long; you can shorten them with this script!
This script uses `requests` for sending HTTP requests and
`BeautifulSoup` from `bs4` for web scraping.

[![Python: 3.7](https://img.shields.io/badge/python-3.7-blue)](https://www.python.org/)
[![Build Status](https://img.shields.io/badge/build-passing-success)](https://github.com/ebsa491)

### Installation

```shell
(SOURCE_DIR)$ pip3 install -r requirements.txt
```

### Usage

```shell
(SOURCE_DIR)$ python3 url_shortener.py [-u, --url] [URL]
```
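
For reference, here is a minimal sketch of the flow the script follows: POST the long URL to shorturl.at's shortener endpoint and scrape the `shortenurl` input field from the returned HTML (endpoint, parameter, and field names are taken from `url_shortener.py` in this commit; the site's markup may change):

```python
import requests
from bs4 import BeautifulSoup

# Endpoint and field names mirror url_shortener.py in this commit;
# shorturl.at may change them at any time, so treat this as a sketch.
API_URL = "https://www.shorturl.at/shortener.php"


def shorten(long_url):
    """Return the shortened URL, or None if the response cannot be parsed."""
    response = requests.post(API_URL, data={"u": long_url})
    soup = BeautifulSoup(response.text, "html.parser")
    input_tag = soup.find("input", attrs={"id": "shortenurl"})
    return input_tag.attrs.get("value") if input_tag else None


if __name__ == "__main__":
    print(shorten("https://github.com/hastagAB/Awesome-Python-Scripts"))
```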

### Other
By Sam Ebison

If you have found any important bug or vulnerability,
please contact me, I love learning (email: ebsa491@gmail.com)
8 url_shortener/requirements.txt Normal file
@@ -0,0 +1,8 @@
beautifulsoup4==4.9.3
certifi==2020.6.20
chardet==3.0.4
idna==2.10
pkg-resources==0.0.0
requests==2.24.0
soupsieve==2.0.1
urllib3==1.25.10
123 url_shortener/url_shortener.py Executable file
@@ -0,0 +1,123 @@
#!/usr/bin/python3
# Created by Sam Ebison ( https://github.com/ebsa491 )
# If you have found any important bug or vulnerability,
# please contact me, I love learning ( email: ebsa491@gmail.com )

"""
Some URLs are really long; you can shorten them with this script!
This script uses ``requests`` for sending HTTP requests and
uses ``BeautifulSoup`` from ``bs4`` for web scraping.
"""

import sys
import argparse
import requests
from bs4 import BeautifulSoup

# ANSI color codes for terminal output
RED_COLOR = "\033[1;31m"
GREEN_COLOR = "\033[1;32m"
NO_COLOR = "\033[0m"

API_URL = "https://www.shorturl.at/shortener.php"
API_PARAM = "u"
# Credit => www.shorturl.at


class API:
    """A class for managing the API results."""

    def __init__(self):
        """__init__"""
        self.__long_url = ""
        self.__short_url = ""

    def set_url(self, url):
        """This method sets self.__long_url = url (self.__long_url setter)."""
        self.__long_url = url

    def get_short_url(self):
        """This method returns self.__short_url (self.__short_url getter)."""
        return self.__short_url

    def request_short_url(self):
        """This method sends a POST request to the API and returns the result text."""

        params = {API_PARAM: self.__long_url}

        try:
            result = requests.post(API_URL, data=params)
        except requests.exceptions.ConnectionError as err:
            return -1, err

        return 1, result.text

    def extract_data_from_html(self, html_page):
        """
        This method parses the HTML text
        and finds the input tag with id='shortenurl' for the shortened URL.
        """

        # Response sample =>
        # <input id="shortenurl" onclick="this.select();" type="text" value="shorturl.at/SOME_CODE"/>

        soup = BeautifulSoup(html_page, 'html.parser')
        input_tag = soup.find("input", attrs={"id": "shortenurl"})

        try:
            self.__short_url = input_tag.attrs["value"]
            return 1
        except (AttributeError, KeyError):
            # The expected input tag (or its value attribute) is missing
            return -1


def main():
    """The main function of the program."""

    if args.url == '' or args.url is None:
        args.url = input("Enter the url> ")

    api_manager = API()

    api_manager.set_url(args.url)
    response_status, result = api_manager.request_short_url()  # Sends the request to the API

    if response_status == -1:
        # Can't connect to the API

        print(f"[{RED_COLOR}-{NO_COLOR}] Error in connecting to the API server...")
        ans = input("Do you want to know the error? [Y/n] ")  # For more information about the error
        if ans.lower() != 'n':
            print(result)

        sys.exit(1)

    if api_manager.extract_data_from_html(result) == -1:
        # Can't parse the html_page

        print(f"[{RED_COLOR}-{NO_COLOR}] Error in parsing the response...")
        sys.exit(1)

    print("=========================")
    print(GREEN_COLOR + api_manager.get_short_url() + NO_COLOR)
    print("=========================")

    sys.exit(0)


if __name__ == '__main__':
    # The program arguments are parsed at module level so main() can read them
    parser = argparse.ArgumentParser(description="URL Shortener")

    # -u | --url URL
    parser.add_argument(
        '-u',
        '--url',
        metavar='url',
        type=str,
        default='',
        help='the URL'
    )

    args = parser.parse_args()

    main()
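
A short sketch of using the `API` class above programmatically rather than through the CLI; it assumes `url_shortener.py` is importable from the working directory, and the printed short URL is only illustrative:

```python
# Sketch: programmatic use of the API class defined in url_shortener.py.
# Assumes url_shortener.py is on the import path; the printed value is illustrative.
from url_shortener import API

manager = API()
manager.set_url("https://github.com/hastagAB/Awesome-Python-Scripts")

status, body = manager.request_short_url()  # POST the long URL to shorturl.at
if status == 1 and manager.extract_data_from_html(body) == 1:
    print(manager.get_short_url())  # e.g. shorturl.at/SOME_CODE
else:
    print("Could not shorten the URL")
```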