Mirror of https://github.com/TheAlgorithms/Python.git (synced 2024-11-23 21:11:08 +00:00)
[mypy] fix small folders 2 (#4293)
* Update perceptron.py
* Update binary_tree_traversals.py
* fix machine_learning
* Update build.yml
* Update perceptron.py
* Update machine_learning/forecasting/run.py

Co-authored-by: Christian Clauss <cclauss@me.com>
This commit is contained in:
parent 959507901a
commit 9b60be67af
.github/workflows/build.yml (vendored): 3 changed lines
@@ -38,10 +38,13 @@ jobs:
            genetic_algorithm
            geodesy
            knapsack
            machine_learning
            networking_flow
            neural_network
            quantum
            scheduling
            sorts
            traversals
      - name: Run tests
        run: pytest --doctest-modules --ignore=project_euler/ --ignore=scripts/ --cov-report=term-missing:skip-covered --cov=. .
      - if: ${{ success() }}
machine_learning/forecasting/run.py

@@ -29,8 +29,7 @@ def linear_regression_prediction(
     >>> abs(n - 5.0) < 1e-6  # Checking precision because of floating point errors
     True
     """
-    x = [[1, item, train_mtch[i]] for i, item in enumerate(train_dt)]
-    x = np.array(x)
+    x = np.array([[1, item, train_mtch[i]] for i, item in enumerate(train_dt)])
     y = np.array(train_usr)
     beta = np.dot(np.dot(np.linalg.inv(np.dot(x.transpose(), x)), x.transpose()), y)
     return abs(beta[0] + test_dt[0] * beta[1] + test_mtch[0] + beta[2])
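Note on the run.py hunk: mypy flags a name that is first bound to a list and then rebound to an np.ndarray as an incompatible reassignment, so building the array in a single expression keeps x at one static type. A minimal sketch of the pattern, with made-up stand-in data (the train_dt / train_mtch values here are illustrative only):

    import numpy as np

    # Hypothetical stand-in data; the real function receives its own training series.
    train_dt = [100.0, 200.0, 300.0]
    train_mtch = [10.0, 20.0, 30.0]

    # Before the fix, x was first a list of lists and then rebound to an ndarray,
    # which mypy reports as an incompatible reassignment. Building the array in
    # one expression keeps x at a single static type.
    x = np.array([[1, item, train_mtch[i]] for i, item in enumerate(train_dt)])
    print(x.shape)  # (3, 3)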
machine_learning: clustering report helper (ReportGenerator)

@@ -200,7 +200,7 @@ if False:  # change to true to run this test case.


 def ReportGenerator(
-    df: pd.DataFrame, ClusteringVariables: np.array, FillMissingReport=None
+    df: pd.DataFrame, ClusteringVariables: np.ndarray, FillMissingReport=None
 ) -> pd.DataFrame:
     """
     Function generates easy-erading clustering report. It takes 2 arguments as an input:
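Why np.ndarray here: np.array is a factory function rather than a class, so mypy rejects it as a type; np.ndarray is the actual array class and is valid in annotations. A small sketch with a hypothetical function name:

    import numpy as np

    def column_means(values: np.ndarray) -> np.ndarray:
        # np.ndarray is the array class, so it is a valid annotation;
        # np.array is only a factory function and is rejected by mypy as a type.
        return values.mean(axis=0)

    print(column_means(np.array([[1.0, 2.0], [3.0, 4.0]])))  # [2. 3.]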
machine_learning: term/document frequency helpers

@@ -61,7 +61,7 @@ def term_frequency(term: str, document: str) -> int:
     return len([word for word in tokenize_document if word.lower() == term.lower()])


-def document_frequency(term: str, corpus: str) -> int:
+def document_frequency(term: str, corpus: str) -> tuple[int, int]:
     """
     Calculate the number of documents in a corpus that contain a
     given term
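The widened return annotation records that the function reports two numbers: how many documents contain the term and how many documents the corpus holds. A hedged sketch of that shape, not the repository's implementation, assuming a newline-separated corpus and Python 3.9+ for the built-in tuple[...] syntax:

    def document_frequency_sketch(term: str, corpus: str) -> tuple[int, int]:
        # Returns (documents containing the term, total documents).
        docs = [doc for doc in corpus.lower().split("\n") if doc]
        hits = sum(1 for doc in docs if term.lower() in doc.split())
        return hits, len(docs)

    print(document_frequency_sketch("cat", "the cat sat\nthe dog ran"))  # (1, 2)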
perceptron.py

@@ -11,7 +11,14 @@ import random


 class Perceptron:
-    def __init__(self, sample, target, learning_rate=0.01, epoch_number=1000, bias=-1):
+    def __init__(
+        self,
+        sample: list[list[float]],
+        target: list[int],
+        learning_rate: float = 0.01,
+        epoch_number: int = 1000,
+        bias: float = -1,
+    ) -> None:
         """
         Initializes a Perceptron network for oil analysis
         :param sample: sample dataset of 3 parameters with shape [30,3]
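The expanded signature makes the constructor's expected types visible to mypy and to callers; on Python 3.7/3.8 the list[...] spelling additionally needs `from __future__ import annotations`. A self-contained usage sketch with a hypothetical stand-in class and made-up data:

    from __future__ import annotations  # allows list[...] annotations on Python 3.7/3.8 too

    class TinyModel:
        """Hypothetical stand-in for Perceptron, only to show the annotated signature."""

        def __init__(
            self,
            sample: list[list[float]],
            target: list[int],
            learning_rate: float = 0.01,
        ) -> None:
            self.sample = sample
            self.target = target
            self.learning_rate = learning_rate

    model = TinyModel(sample=[[0.1, 0.4, 0.7], [0.2, 0.5, 0.8]], target=[1, -1])
    print(len(model.sample), model.learning_rate)  # 2 0.01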
@@ -46,7 +53,7 @@ class Perceptron:
         self.bias = bias
         self.number_sample = len(sample)
         self.col_sample = len(sample[0])  # number of columns in dataset
-        self.weight = []
+        self.weight: list = []

     def training(self) -> None:
         """
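The bare annotation exists because mypy cannot infer the element type of an empty list and reports 'Need type annotation for "weight"'; annotating the attribute (here with plain list, optionally list[float]) silences that without changing behaviour. A minimal sketch with a hypothetical class:

    class WeightStore:
        """Hypothetical example, not the Perceptron class itself."""

        def __init__(self) -> None:
            # self.weight = []      # without an annotation, mypy: Need type annotation for "weight"
            self.weight: list = []  # annotated; runtime behaviour is unchanged

        def add(self, value: float) -> None:
            self.weight.append(value)

    store = WeightStore()
    store.add(0.5)
    print(store.weight)  # [0.5]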
@@ -94,7 +101,7 @@ class Perceptron:
                 # if epoch_count > self.epoch_number or not error:
                 break

-    def sort(self, sample) -> None:
+    def sort(self, sample: list[float]) -> None:
         """
         :param sample: example row to classify as P1 or P2
         :return: None
@@ -221,11 +228,11 @@ if __name__ == "__main__":
     print("Finished training perceptron")
     print("Enter values to predict or q to exit")
     while True:
-        sample = []
+        sample: list = []
         for i in range(len(samples[0])):
-            observation = input("value: ").strip()
-            if observation == "q":
+            user_input = input("value: ").strip()
+            if user_input == "q":
                 break
-            observation = float(observation)
+            observation = float(user_input)
             sample.insert(i, observation)
         network.sort(sample)
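The renames in this hunk are typing fixes rather than behaviour changes: observation previously held a str from input() and was then rebound to a float, which mypy rejects; reading into user_input and converting into a separate name gives each variable a single type. A small sketch of the pattern, with a hard-coded stand-in for the input() loop:

    values: list = []
    for raw in ["1.5", "2.5", "q"]:         # stand-in for repeated input("value: ") calls
        user_input = raw.strip()            # always a str
        if user_input == "q":
            break
        observation = float(user_input)     # always a float; no name changes type
        values.append(observation)
    print(values)  # [1.5, 2.5]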
binary_tree_traversals.py

@@ -188,7 +188,7 @@ def pre_order_iter(node: TreeNode) -> None:
     """
     if not isinstance(node, TreeNode) or not node:
         return
-    stack: List[TreeNode] = []
+    stack: list[TreeNode] = []
     n = node
     while n or stack:
         while n:  # start from root node, find its left child
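typing.List[...] and the built-in list[...] describe the same type; PEP 585 (Python 3.9) allows the built-in form, so the typing import can eventually be dropped. A minimal sketch showing both spellings with hypothetical function names:

    from typing import List

    def double_old(xs: List[int]) -> List[int]:   # typing.List spelling (pre-PEP 585)
        return [2 * x for x in xs]

    def double_new(xs: list[int]) -> list[int]:   # built-in generic, valid from Python 3.9
        return [2 * x for x in xs]

    print(double_old([1, 2]), double_new([3, 4]))  # [2, 4] [6, 8]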
@@ -218,7 +218,7 @@ def in_order_iter(node: TreeNode) -> None:
     """
     if not isinstance(node, TreeNode) or not node:
         return
-    stack: List[TreeNode] = []
+    stack: list[TreeNode] = []
     n = node
     while n or stack:
         while n: