isort --profile black . (#2181)

* updating DIRECTORY.md

* isort --profile black .

* Black after

* updating DIRECTORY.md

Co-authored-by: github-actions <${GITHUB_ACTOR}@users.noreply.github.com>
Christian Clauss, 2020-07-06 09:44:19 +02:00 (committed by GitHub)
commit 5f4da5d616, parent cd3e8f95a0
80 changed files with 123 additions and 127 deletions
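
For anyone reproducing the run: per isort's documentation, --profile black is shorthand for Black-compatible settings (multi_line_output = 3, include_trailing_comma = true, force_grid_wrap = 0, use_parentheses = true, ensure_newline_before_comments = true, line_length = 88). A minimal sketch of the same normalization through isort's Python API, assuming isort >= 5.0 is installed; the sample import lines are modeled on the diffs below, not taken from any one file:

    import isort

    # Two problems isort fixes across many files in this commit: a third-party
    # import placed before a stdlib one, and unsorted names in a from-import.
    messy = (
        "from numpy import array, cos, sin, radians, cross\n"
        "from typing import List\n"
    )

    print(isort.code(messy, profile="black"))
    # from typing import List
    #
    # from numpy import array, cos, cross, radians, sin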


@@ -17,7 +17,7 @@ jobs:
        if: failure()
        run: |
          black .
-          isort --profile black --recursive .
+          isort --profile black .
          git config --global user.name github-actions
          git config --global user.email '${GITHUB_ACTOR}@users.noreply.github.com'
          git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/$GITHUB_REPOSITORY
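
Context for the one-line change above: isort 5.0, released shortly before this commit, removed the --recursive flag and now traverses any directory passed on the command line by default, so the workflow only needed isort --profile black . to keep passing.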


@@ -253,6 +253,7 @@
* [Finding Bridges](https://github.com/TheAlgorithms/Python/blob/master/graphs/finding_bridges.py)
* [Frequent Pattern Graph Miner](https://github.com/TheAlgorithms/Python/blob/master/graphs/frequent_pattern_graph_miner.py)
* [G Topological Sort](https://github.com/TheAlgorithms/Python/blob/master/graphs/g_topological_sort.py)
+* [Gale Shapley Bigraph](https://github.com/TheAlgorithms/Python/blob/master/graphs/gale_shapley_bigraph.py)
* [Graph List](https://github.com/TheAlgorithms/Python/blob/master/graphs/graph_list.py)
* [Graph Matrix](https://github.com/TheAlgorithms/Python/blob/master/graphs/graph_matrix.py)
* [Graphs Floyd Warshall](https://github.com/TheAlgorithms/Python/blob/master/graphs/graphs_floyd_warshall.py)
@@ -596,6 +597,7 @@
## Searches
* [Binary Search](https://github.com/TheAlgorithms/Python/blob/master/searches/binary_search.py)
* [Double Linear Search](https://github.com/TheAlgorithms/Python/blob/master/searches/double_linear_search.py)
+* [Fibonacci Search](https://github.com/TheAlgorithms/Python/blob/master/searches/fibonacci_search.py)
* [Hill Climbing](https://github.com/TheAlgorithms/Python/blob/master/searches/hill_climbing.py)
* [Interpolation Search](https://github.com/TheAlgorithms/Python/blob/master/searches/interpolation_search.py)


@@ -6,9 +6,10 @@ flake8 : passed
mypy : passed
"""
-from numpy import array, cos, sin, radians, cross  # type: ignore
from typing import List
+
+from numpy import array, cos, cross, radians, sin  # type: ignore


def polar_force(
    magnitude: float, angle: float, radian_mode: bool = False
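
This first source hunk shows the pattern repeated in nearly every file below. isort's defaults, which the black profile leaves untouched, boil down to a few rules; the sketch uses illustrative stand-in modules (numpy/cv2 for third-party, hash_table for first-party), so treat it as a template rather than code from this repository:

    from __future__ import annotations  # 1) __future__ imports first

    import string                    # 2) stdlib next: plain imports first,
    from typing import Dict, List    #    then from-imports, names alphabetized

    import numpy as np               # 3) third-party, one blank line per section
    from cv2 import imread, imshow

    from hash_table import HashTable  # 4) first-party/local imports last

Duplicate from-imports of a single module are also merged, as in the from random import gauss, seed hunk further down.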


@@ -2,9 +2,9 @@
# Author: Syed Haseeb Shah (github.com/QuantumNovice)
# The Newton-Raphson method (also known as Newton's method) is a way to
# quickly find a good approximation for the root of a real-valued function
from decimal import Decimal
from math import *  # noqa: F401, F403
+
from sympy import diff
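
A note on the line kept above: the # noqa: F401, F403 comment suppresses flake8's unused-import (F401) and star-import (F403) warnings; isort leaves from math import * in place because a wildcard binds names it cannot enumerate or sort.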


@@ -35,8 +35,8 @@ https://www.youtube.com/watch?v=kfmNeskzs2o
https://www.youtube.com/watch?v=4RhLNDqcjpA
"""
import string
+
import numpy


@@ -1,5 +1,5 @@
-import string
import itertools
+import string


def chunker(seq, size):


@@ -10,7 +10,7 @@ without needing to store any additional data except the position of the first
original character. The BWT is thus a "free" method of improving the efficiency
of text compression algorithms, costing only some extra computation.
"""
-from typing import List, Dict
+from typing import Dict, List


def all_rotations(s: str) -> List[str]:


@@ -1,5 +1,5 @@
-import numpy as np
import cv2
+import numpy as np

"""
Harris Corner Detector


@@ -35,7 +35,7 @@ https://www.geeksforgeeks.org/segment-tree-efficient-implementation/
>>> st.query(0, 2)
[1, 2, 3]
"""
-from typing import List, Callable, TypeVar
+from typing import Callable, List, TypeVar

T = TypeVar("T")


@@ -3,9 +3,8 @@ Segment_tree creates a segment tree with a given array and function,
allowing queries to be done later in log(N) time
function takes 2 values and returns a same type value
"""
-from queue import Queue
from collections.abc import Sequence
+from queue import Queue


class SegmentTreeNode(object):


@@ -1,7 +1,6 @@
#!/usr/bin/env python3
from hash_table import HashTable
-from number_theory.prime_numbers import next_prime, check_prime
+from number_theory.prime_numbers import check_prime, next_prime


class DoubleHash(HashTable):


@@ -1,6 +1,7 @@
-from hash_table import HashTable
from collections import deque
+
+from hash_table import HashTable


class HashTableWithLinkedList(HashTable):
    def __init__(self, *args, **kwargs):


@@ -1,8 +1,7 @@
"""
Implemented an algorithm using opencv to convert a colored image into its negative
"""
-from cv2 import imread, imshow, waitKey, destroyAllWindows
+from cv2 import destroyAllWindows, imread, imshow, waitKey


def convert_to_negative(img):


@@ -1,8 +1,8 @@
"""
Implementation Burke's algorithm (dithering)
"""
-from cv2 import destroyAllWindows, imread, imshow, waitKey
import numpy as np
+from cv2 import destroyAllWindows, imread, imshow, waitKey


class Burkes:


@@ -1,5 +1,6 @@
import cv2
import numpy as np
+
from digital_image_processing.filters.convolve import img_convolve
from digital_image_processing.filters.sobel_filter import sobel_filter


@@ -9,11 +9,11 @@ Inputs:
Output:
    img:A 2d zero padded image with values in between 0 and 1
"""
+import math
+import sys
+
import cv2
import numpy as np
-import math
-import sys


def vec_gaussian(img: np.ndarray, variance: float) -> np.ndarray:


@@ -1,8 +1,8 @@
# @Author  : lightXu
# @File    : convolve.py
# @Time    : 2019/7/8 0008 下午 16:13
-from cv2 import imread, cvtColor, COLOR_BGR2GRAY, imshow, waitKey
-from numpy import array, zeros, ravel, pad, dot, uint8
+from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey
+from numpy import array, dot, pad, ravel, uint8, zeros


def im2col(image, block_size):


@@ -1,10 +1,11 @@
"""
Implementation of gaussian filter algorithm
"""
-from cv2 import imread, cvtColor, COLOR_BGR2GRAY, imshow, waitKey
-from numpy import pi, mgrid, exp, square, zeros, ravel, dot, uint8
from itertools import product
+
+from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey
+from numpy import dot, exp, mgrid, pi, ravel, square, uint8, zeros


def gen_gaussian_kernel(k_size, sigma):
    center = k_size // 2


@@ -1,9 +1,8 @@
"""
Implementation of median filter algorithm
"""
-from cv2 import imread, cvtColor, COLOR_BGR2GRAY, imshow, waitKey
-from numpy import zeros_like, ravel, sort, multiply, divide, int8
+from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey
+from numpy import divide, int8, multiply, ravel, sort, zeros_like


def median_filter(gray_img, mask=3):


@@ -2,7 +2,8 @@
# @File    : sobel_filter.py
# @Time    : 2019/7/8 0008 下午 16:26
import numpy as np
-from cv2 import imread, cvtColor, COLOR_BGR2GRAY, imshow, waitKey
+from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey
+
from digital_image_processing.filters.convolve import img_convolve


@@ -6,10 +6,9 @@ Created on Fri Sep 28 15:22:29 2018
import copy
import os

-import numpy as np
import cv2
-import matplotlib.pyplot as plt
+import numpy as np
+from matplotlib import pyplot as plt


class contrastStretch:
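
Hunks like this one change more than ordering: import matplotlib.pyplot as plt becomes from matplotlib import pyplot as plt, and later import sklearn.datasets as ds and the import digital_image_processing.x as y lines get the same treatment. This appears to be isort 5's normalization of aliased submodule imports into the equivalent from-form, which sorts more predictably; both spellings bind the same module object, so behavior is unchanged.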


@@ -1,6 +1,6 @@
""" Multiple image resizing techniques """
import numpy as np
-from cv2 import imread, imshow, waitKey, destroyAllWindows
+from cv2 import destroyAllWindows, imread, imshow, waitKey


class NearestNeighbour:


@@ -1,6 +1,6 @@
-from matplotlib import pyplot as plt
-import numpy as np
import cv2
+import numpy as np
+from matplotlib import pyplot as plt


def get_rotation(


@@ -1,8 +1,7 @@
"""
Implemented an algorithm using opencv to tone an image with sepia technique
"""
-from cv2 import imread, imshow, waitKey, destroyAllWindows
+from cv2 import destroyAllWindows, imread, imshow, waitKey


def make_sepia(img, factor: int):


@@ -1,21 +1,21 @@
"""
PyTest's for Digital Image Processing
"""
-import digital_image_processing.edge_detection.canny as canny
-import digital_image_processing.filters.gaussian_filter as gg
-import digital_image_processing.filters.median_filter as med
-import digital_image_processing.filters.sobel_filter as sob
-import digital_image_processing.filters.convolve as conv
-import digital_image_processing.change_contrast as cc
-import digital_image_processing.convert_to_negative as cn
-import digital_image_processing.sepia as sp
-import digital_image_processing.dithering.burkes as bs
-import digital_image_processing.resize.resize as rs
-from cv2 import imread, cvtColor, COLOR_BGR2GRAY
+from cv2 import COLOR_BGR2GRAY, cvtColor, imread
from numpy import array, uint8
from PIL import Image
+from digital_image_processing import change_contrast as cc
+from digital_image_processing import convert_to_negative as cn
+from digital_image_processing import sepia as sp
+from digital_image_processing.dithering import burkes as bs
+from digital_image_processing.edge_detection import canny as canny
+from digital_image_processing.filters import convolve as conv
+from digital_image_processing.filters import gaussian_filter as gg
+from digital_image_processing.filters import median_filter as med
+from digital_image_processing.filters import sobel_filter as sob
+from digital_image_processing.resize import resize as rs

img = imread(r"digital_image_processing/image_data/lena_small.jpg")
gray = cvtColor(img, COLOR_BGR2GRAY)


@@ -1,5 +1,5 @@
-from itertools import accumulate
from bisect import bisect
+from itertools import accumulate


def fracKnapsack(vl, wt, W, n):


@@ -73,9 +73,10 @@ if __name__ == "__main__":
    A random simulation of this algorithm.
    """
    import time
-    import matplotlib.pyplot as plt
    from random import randint
+
+    from matplotlib import pyplot as plt

    inputs = [10, 100, 1000, 10000, 50000, 100000, 200000, 300000, 400000, 500000]
    tim = []
    for i in inputs:


@@ -16,9 +16,7 @@
# frequencies will be placed near the root of the tree while the nodes
# with low frequencies will be placed near the leaves of the tree thus
# reducing search time in the most frequent instances.
import sys
-
-
from random import randint


@@ -9,7 +9,6 @@ Python:
import numpy as np
-
import skfuzzy as fuzz

if __name__ == "__main__":
    # Create universe of discourse in Python using linspace ()
    X = np.linspace(start=0, stop=75, num=75, endpoint=True, retstep=False)

@@ -45,7 +44,7 @@ if __name__ == "__main__":
    # max-product composition
    # Plot each set A, set B and each operation result using plot() and subplot().
-    import matplotlib.pyplot as plt
+    from matplotlib import pyplot as plt

    plt.figure()


@@ -1,4 +1,5 @@
from math import atan, cos, radians, sin, tan
+
from haversine_distance import haversine_distance


@@ -1,7 +1,7 @@
# https://en.wikipedia.org/wiki/B%C3%A9zier_curve
# https://www.tutorialspoint.com/computer_graphics/computer_graphics_curves.htm
from typing import List, Tuple
+
from scipy.special import comb

@@ -78,7 +78,7 @@ class BezierCurve:
        step_size: defines the step(s) at which to evaluate the Bezier curve.
        The smaller the step size, the finer the curve produced.
        """
-        import matplotlib.pyplot as plt
+        from matplotlib import pyplot as plt

        to_plot_x: List[float] = []  # x coordinates of points to plot
        to_plot_y: List[float] = []  # y coordinates of points to plot


@@ -1,6 +1,5 @@
from collections import deque
-

if __name__ == "__main__":
    # Accept No. of Nodes and edges
    n, m = map(int, input().split(" "))


@@ -12,8 +12,7 @@ while Q is non-empty:
    mark w as explored
    add w to Q (at the end)
"""
-from typing import Set, Dict
+from typing import Dict, Set

G = {
    "A": ["B", "C"],


@@ -11,8 +11,7 @@ Pseudocode:
    if v unexplored:
        DFS(G, v)
"""
-from typing import Set, Dict
+from typing import Dict, Set


def depth_first_search(graph: Dict, start: str) -> Set[int]:


@@ -1,7 +1,7 @@
-from collections import deque
-import random as rand
import math as math
+import random as rand
import time
+from collections import deque

# the default weight is 1 if not assigned but all the implementation is weighted


@@ -1,4 +1,5 @@
import heapq
+
import numpy as np


@@ -1,4 +1,5 @@
import unittest
+
import greedy_knapsack as kp


@@ -23,10 +23,9 @@ state. After the last block is processed the current hash state is returned as
the final hash.
Reference: https://deadhacker.com/2006/02/21/sha-1-illustrated/
"""
import argparse
-import struct
import hashlib  # hashlib is only used inside the Test class
+import struct
import unittest


@@ -6,9 +6,16 @@ Created on Mon Feb 26 15:40:07 2018
This file contains the test-suite for the linear algebra library.
"""
import unittest

-from lib import Matrix, Vector, axpy, squareZeroMatrix, unitBasisVector, zeroVector
+from lib import (
+    Matrix,
+    Vector,
+    axpy,
+    squareZeroMatrix,
+    unitBasisVector,
+    zeroVector,
+)


class Test(unittest.TestCase):
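
The parenthesized block shows the black profile's wrap style: multi_line_output = 3 (vertical hanging indent) together with use_parentheses and include_trailing_comma puts each imported name on its own line with a trailing comma once the statement exceeds the configured line length, so adding one more name later touches a single line of the diff.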


@@ -1,10 +1,9 @@
# Gaussian Naive Bayes Example
-from sklearn.naive_bayes import GaussianNB
-from sklearn.metrics import plot_confusion_matrix
+from matplotlib import pyplot as plt
from sklearn.datasets import load_iris
+from sklearn.metrics import plot_confusion_matrix
from sklearn.model_selection import train_test_split
-import matplotlib.pyplot as plt
+from sklearn.naive_bayes import GaussianNB


def main():


@@ -52,11 +52,12 @@ Usage:
"""
+import warnings
+
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from sklearn.metrics import pairwise_distances
-import warnings

warnings.filterwarnings("ignore")

@@ -193,7 +194,7 @@ def kmeans(
# Mock test below
if False:  # change to true to run this test case.
-    import sklearn.datasets as ds
+    from sklearn import datasets as ds

    dataset = ds.load_iris()
    k = 3


@@ -1,5 +1,6 @@
-import numpy as np
from collections import Counter
+
+import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split


@@ -1,5 +1,5 @@
-from sklearn.model_selection import train_test_split
from sklearn.datasets import load_iris
+from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

# Load iris file


@@ -41,11 +41,9 @@
Author: @EverLookNeverSee
"""
from math import log
from os import name, system
-from random import gauss
-from random import seed
+from random import gauss, seed

# Make a training dataset drawn from a gaussian distribution


@@ -7,8 +7,8 @@ We try to set the weight of these features, over many iterations, so that they b
fit our dataset. In this particular code, I had used a CSGO dataset (ADR vs
Rating). We try to best fit a line through dataset and estimate the parameters.
"""
-import requests
import numpy as np
+import requests


def collect_dataset():


@@ -14,14 +14,12 @@ Helpful resources:
    Coursera ML course
    https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac
"""
import numpy as np
-import matplotlib.pyplot as plt
+from matplotlib import pyplot as plt
+from sklearn import datasets

# get_ipython().run_line_magic('matplotlib', 'inline')
-from sklearn import datasets

# In[67]:


@@ -4,14 +4,12 @@
    * http://colah.github.io/posts/2015-08-Understanding-LSTMs
    * https://en.wikipedia.org/wiki/Long_short-term_memory
"""
-from keras.layers import Dense, LSTM
-from keras.models import Sequential
import numpy as np
import pandas as pd
+from keras.layers import LSTM, Dense
+from keras.models import Sequential
from sklearn.preprocessing import MinMaxScaler

if __name__ == "__main__":
    """
    First part of building a model is to get the data and prepare
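
LSTM, Dense in the new line is not a sorting bug: isort's default order_by_type = true groups ALL_CAPS names, treated as constants, ahead of CamelCase names within a from-import, so LSTM sorts before Dense.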


@@ -1,6 +1,5 @@
from sklearn.neural_network import MLPClassifier
-

X = [[0.0, 0.0], [1.0, 1.0], [1.0, 0.0], [0.0, 1.0]]
y = [0, 1, 0, 0]


@@ -1,5 +1,5 @@
-import matplotlib.pyplot as plt
import pandas as pd
+from matplotlib import pyplot as plt
from sklearn.linear_model import LinearRegression

# Splitting the dataset into the Training set and Test set


@@ -1,10 +1,9 @@
# Random Forest Classifier Example
+from matplotlib import pyplot as plt
from sklearn.datasets import load_iris
-from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import plot_confusion_matrix
-import matplotlib.pyplot as plt
+from sklearn.model_selection import train_test_split


def main():


@@ -1,10 +1,8 @@
# Random Forest Regressor Example
from sklearn.datasets import load_boston
-from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
-from sklearn.metrics import mean_absolute_error
-from sklearn.metrics import mean_squared_error
+from sklearn.metrics import mean_absolute_error, mean_squared_error
+from sklearn.model_selection import train_test_split


def main():


@@ -36,9 +36,9 @@ import os
import sys
import urllib.request

-import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
+from matplotlib import pyplot as plt
from sklearn.datasets import make_blobs, make_circles
from sklearn.preprocessing import StandardScaler


@@ -1,5 +1,5 @@
-from sklearn.datasets import load_iris
from sklearn import svm
+from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split


@@ -1,4 +1,4 @@
-from typing import Tuple, List
+from typing import List, Tuple


def n31(a: int) -> Tuple[List[int], int]:


@@ -7,10 +7,10 @@
reference-->Su, Francis E., et al. "Fibonacci Number Formula." Math Fun Facts.
<http://www.math.hmc.edu/funfacts>
"""
-import math
import functools
+import math
import time
-from decimal import getcontext, Decimal
+from decimal import Decimal, getcontext

getcontext().prec = 100


@@ -1,6 +1,7 @@
import math
+
-from scipy.integrate import quad
from numpy import inf
+from scipy.integrate import quad


def gamma(num: float) -> float:


@@ -5,7 +5,7 @@ python/black : True
python : 3.7.3
"""
-from numpy import pi, sqrt, exp
+from numpy import exp, pi, sqrt


def gaussian(x, mu: float = 0.0, sigma: float = 1.0) -> int:


@@ -1,5 +1,5 @@
-from typing import Callable, Union
import math as m
+from typing import Callable, Union


def line_length(


@@ -5,8 +5,8 @@ python/black : True
flake8 : True
"""
-from maths.prime_factors import prime_factors
from maths.is_square_free import is_square_free
+from maths.prime_factors import prime_factors


def mobius(n: int) -> int:


@@ -7,7 +7,6 @@
limit is reached or the gradient f'(x[n]) approaches zero. In both cases, exception
is raised. If iteration limit is reached, try increasing maxiter.
"""
import math as m
-

@@ -42,7 +41,7 @@ def newton_raphson(f, x0=0, maxiter=100, step=0.0001, maxerror=1e-6, logsteps=Fa
if __name__ == "__main__":
-    import matplotlib.pyplot as plt
+    from matplotlib import pyplot as plt

    f = lambda x: m.tanh(x) ** 2 - m.exp(3 * x)  # noqa: E731
    solution, error, steps = newton_raphson(


@@ -1,5 +1,5 @@
-from typing import Generator
import math
+from typing import Generator


def slow_primes(max: int) -> Generator[int, None, None]:


@@ -9,9 +9,9 @@ After through ReLU, the element of the vector always 0 or real number.
Script inspired from its corresponding Wikipedia article
https://en.wikipedia.org/wiki/Rectifier_(neural_networks)
"""
+from typing import List
+
import numpy as np
-from typing import List


def relu(vector: List[float]):


@@ -3,8 +3,8 @@ Find Volumes of Various Shapes.
Wikipedia reference: https://en.wikipedia.org/wiki/Volume
"""
-from typing import Union
from math import pi, pow
+from typing import Union


def vol_cube(side_length: Union[int, float]) -> float:


@@ -1,5 +1,5 @@
-import datetime
import argparse
+import datetime


def zeller(date_input: str) -> str:


@@ -6,11 +6,13 @@ the pytest run configuration
    -vv -m mat_ops -p no:cacheprovider
"""
+import logging
+
# standard libraries
import sys

import numpy as np
import pytest
-import logging

# Custom/local libraries
from matrix import matrix_operation as matop


@@ -17,9 +17,8 @@ Github : https://github.com/RiptideBo
Date: 2017.11.23
"""
import numpy as np
-import matplotlib.pyplot as plt
+from matplotlib import pyplot as plt


def sigmoid(x):


@@ -14,8 +14,9 @@
    - - - - - -- - - - - - - - - - - - - - - - - - - - - - -
"""
import pickle
+
import numpy as np
-import matplotlib.pyplot as plt
+from matplotlib import pyplot as plt


class CNN:


@@ -1,5 +1,5 @@
-from abc import abstractmethod
import sys
+from abc import abstractmethod
from collections import deque


@@ -27,8 +27,8 @@ Credits: This code was written by editing the code from
http://www.riannetrujillo.com/blog/python-fractal/
"""
-import turtle
import sys
+import turtle

PROGNAME = "Sierpinski Triangle"


@@ -5,8 +5,8 @@ By listing the first six prime numbers:
We can see that the 6th prime is 13. What is the Nth prime number?
"""
-import math
import itertools
+import math


def primeCheck(number):


@@ -15,7 +15,6 @@ words?
"""
import os
-

# Precomputes a list of the 100 first triangular numbers
TRIANGULAR_NUMBERS = [int(0.5 * n * (n + 1)) for n in range(1, 101)]


@@ -3,9 +3,9 @@ Shortest job remaining first
Please note arrival time and burst
Please use spaces to separate times entered.
"""
+from typing import List
+
import pandas as pd
-from typing import List


def calculate_waitingtime(


@@ -1,6 +1,6 @@
#!/usr/bin/env python3
import os
+
from build_directory_md import good_file_paths
-

filepaths = list(good_file_paths())


@@ -150,7 +150,7 @@ def hill_climbing(
            solution_found = True

    if visualization:
-        import matplotlib.pyplot as plt
+        from matplotlib import pyplot as plt

        plt.plot(range(iterations), scores)
        plt.xlabel("Iterations")


@@ -84,7 +84,7 @@ def simulated_annealing(
        current_state = next_state

    if visualization:
-        import matplotlib.pyplot as plt
+        from matplotlib import pyplot as plt

        plt.plot(range(iterations), scores)
        plt.xlabel("Iterations")


@@ -24,9 +24,8 @@ python tabu_search.py -f your_file_name.txt -number_of_iterations_of_tabu_search
    -s size_of_tabu_search
e.g. python tabu_search.py -f tabudata2.txt -i 4 -s 3
"""
-import copy
import argparse
+import copy


def generate_neighbours(path):


@@ -3,8 +3,8 @@
#
# Sort large text files in a minimum amount of memory
#
-import os
import argparse
+import os


class FileSplitter:


@@ -1,5 +1,6 @@
from random import randint
from tempfile import TemporaryFile
+
import numpy as np


@@ -1,10 +1,9 @@
import sys
import webbrowser

+import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
-import requests

if __name__ == "__main__":
    print("Googling.....")


@@ -1,5 +1,5 @@
-from bs4 import BeautifulSoup
import requests
+from bs4 import BeautifulSoup


def imdb_top(imdb_top_n):