Reenable files when TensorFlow supports the current Python (#8602)

* Remove python_version < "3.11" for tensorflow

* Reenable neural_network/input_data.py_tf

* updating DIRECTORY.md

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Try to fix ruff

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Try to fix ruff

* Try to fix ruff

* Try to fix ruff

* Try to fix pre-commit

* Try to fix

* Fix

* Fix

* Reenable dynamic_programming/k_means_clustering_tensorflow.py_tf

* updating DIRECTORY.md

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Try to fix ruff

---------

Co-authored-by: github-actions <${GITHUB_ACTOR}@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Maxim Smolskiy 2023-04-01 20:43:11 +03:00 committed by GitHub
parent 84b6852de8
commit 56a40eb3ee
4 changed files with 54 additions and 55 deletions

DIRECTORY.md

@@ -309,6 +309,7 @@
* [Floyd Warshall](dynamic_programming/floyd_warshall.py)
* [Integer Partition](dynamic_programming/integer_partition.py)
* [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py)
* [K Means Clustering Tensorflow](dynamic_programming/k_means_clustering_tensorflow.py)
* [Knapsack](dynamic_programming/knapsack.py)
* [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py)
* [Longest Common Substring](dynamic_programming/longest_common_substring.py)
@@ -685,6 +686,7 @@
* [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py)
* [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py)
* [Convolution Neural Network](neural_network/convolution_neural_network.py)
* [Input Data](neural_network/input_data.py)
* [Perceptron](neural_network/perceptron.py)
* [Simple Neural Network](neural_network/simple_neural_network.py)

dynamic_programming/k_means_clustering_tensorflow.py

@@ -1,9 +1,10 @@
import tensorflow as tf
from random import shuffle
import tensorflow as tf
from numpy import array
def TFKMeansCluster(vectors, noofclusters):
def tf_k_means_cluster(vectors, noofclusters):
"""
K-Means Clustering using TensorFlow.
'vectors' should be a n*k 2-D NumPy array, where n is the number
@@ -30,7 +31,6 @@ def TFKMeansCluster(vectors, noofclusters):
graph = tf.Graph()
with graph.as_default():
# SESSION OF COMPUTATION
sess = tf.Session()
@@ -95,8 +95,7 @@ def TFKMeansCluster(vectors, noofclusters):
# iterations. To keep things simple, we will only do a set number of
# iterations, instead of using a Stopping Criterion.
noofiterations = 100
for iteration_n in range(noofiterations):
for _ in range(noofiterations):
##EXPECTATION STEP
##Based on the centroid locations till last iteration, compute
##the _expected_ centroid assignments.

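The renamed tf_k_means_cluster keeps the expectation/maximization loop described in the comments above: assign every vector to its nearest centroid, then recompute each centroid as the mean of its assigned vectors. A minimal NumPy-only sketch of that loop, with hypothetical names (k_means_sketch is not part of the repository file):

import numpy as np


def k_means_sketch(vectors, noofclusters, noofiterations=100, seed=0):
    """Plain-NumPy illustration of the expectation/maximization loop above."""
    rng = np.random.default_rng(seed)
    vectors = np.asarray(vectors, dtype=float)
    # Start from randomly chosen input vectors as the initial centroids
    centroids = vectors[rng.choice(len(vectors), noofclusters, replace=False)]
    assignments = np.zeros(len(vectors), dtype=int)
    for _ in range(noofiterations):
        # EXPECTATION: assign every vector to its nearest centroid
        distances = np.linalg.norm(vectors[:, None, :] - centroids[None, :, :], axis=2)
        assignments = distances.argmin(axis=1)
        # MAXIMIZATION: move each centroid to the mean of its assigned vectors
        for k in range(noofclusters):
            members = vectors[assignments == k]
            if len(members):
                centroids[k] = members.mean(axis=0)
    return centroids, assignments


if __name__ == "__main__":
    # Two well-separated blobs should yield centroids near (0, 0) and (10, 10)
    data = np.vstack([np.zeros((5, 2)), np.full((5, 2), 10.0)])
    print(k_means_sketch(data, noofclusters=2))
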
neural_network/input_data.py

@@ -21,13 +21,10 @@ This module and all its submodules are deprecated.
import collections
import gzip
import os
import urllib
import numpy
from six.moves import urllib
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import dtypes, random_seed
from tensorflow.python.platform import gfile
from tensorflow.python.util.deprecation import deprecated
@@ -206,8 +203,8 @@ class _DataSet:
else:
fake_label = 0
return (
[fake_image for _ in xrange(batch_size)],
[fake_label for _ in xrange(batch_size)],
[fake_image for _ in range(batch_size)],
[fake_label for _ in range(batch_size)],
)
start = self._index_in_epoch
# Shuffle for the first epoch
@@ -262,7 +259,7 @@ def _maybe_download(filename, work_directory, source_url):
gfile.MakeDirs(work_directory)
filepath = os.path.join(work_directory, filename)
if not gfile.Exists(filepath):
urllib.request.urlretrieve(source_url, filepath)
urllib.request.urlretrieve(source_url, filepath) # noqa: S310
with gfile.GFile(filepath) as f:
size = f.size()
print("Successfully downloaded", filename, size, "bytes.")
@@ -328,7 +325,8 @@ def read_data_sets(
if not 0 <= validation_size <= len(train_images):
raise ValueError(
f"Validation size should be between 0 and {len(train_images)}. Received: {validation_size}."
f"Validation size should be between 0 and {len(train_images)}. "
f"Received: {validation_size}."
)
validation_images = train_images[:validation_size]
@@ -336,7 +334,7 @@
train_images = train_images[validation_size:]
train_labels = train_labels[validation_size:]
options = dict(dtype=dtype, reshape=reshape, seed=seed)
options = {"dtype": dtype, "reshape": reshape, "seed": seed}
train = _DataSet(train_images, train_labels, **options)
validation = _DataSet(validation_images, validation_labels, **options)

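Rewriting the dict(...) call as a literal is behaviorally identical; the literal is simply the spelling ruff prefers (its C408 check flags unnecessary dict calls). A small sketch with hypothetical names (make_dataset stands in for _DataSet) showing that both spellings unpack to the same keyword arguments:

def make_dataset(images, labels, *, dtype="float32", reshape=True, seed=None):
    # Hypothetical stand-in for _DataSet: just echoes the keyword arguments it got.
    return {"images": images, "labels": labels, "dtype": dtype, "reshape": reshape, "seed": seed}


options_call = dict(dtype="float32", reshape=False, seed=42)  # old spelling, flagged by ruff
options_literal = {"dtype": "float32", "reshape": False, "seed": 42}  # new spelling

# Unpacking either mapping with ** passes identical keyword arguments.
assert make_dataset([], [], **options_call) == make_dataset([], [], **options_literal)
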
requirements.txt

@@ -15,7 +15,7 @@ scikit-fuzzy
scikit-learn
statsmodels
sympy
tensorflow; python_version < "3.11"
tensorflow
texttable
tweepy
xgboost
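
The dropped `; python_version < "3.11"` environment marker told pip to skip tensorflow entirely on Python 3.11 and newer; removing it lets the package install everywhere now that TensorFlow supports the current Python. For illustration only, a hedged runtime equivalent of that install-time gate (not part of this change):

import sys

# Roughly what the removed `; python_version < "3.11"` marker expressed, but
# checked at runtime instead of at pip install time (illustrative only).
if sys.version_info < (3, 11):
    try:
        import tensorflow as tf  # noqa: F401
    except ImportError:  # TensorFlow may simply not be installed here
        tf = None
else:
    tf = None  # interpreter newer than what the old marker allowed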