 
 


     ,           .    ,         ,     ,        .  ,    ,   ,    (,   ),      ,   .      ,    ,    .   ,               .  ,               ,          .        ,        .





 

 


   ,     .










    ,      .    ,       ,     .     ,       .

 ,      ,            .     ,     ,  ,    ,      .

             .            ,        ,      .

        ,           .    ,   ,   ,       ,        ,   .

            .          ,         .               ,           .



   :

,         ;

          ;

          ;

    ,            ;

      ,    ,  ,  ,    ;

       ,       .



             ,        .      ,         .       ,          .

 ,       ,   ,     .   ,    ,    IT    ,           .       !




 1:  


           .     ,    ,    . ,  ,   ,    .

   .       ,        .           ,    .

       .  ,        ,       ,                .

    ,        .     ,  ,          .         ,   . ,     ,  , ReLU (Rectified Linear Unit)   .

    ,    .    ,        ,    .     ,       .            ,     .

    ,   ,    ,      ,           .     ,        ,    .

 ,               .      .              .    ,       ,   .

          ,       .     ,       .

    ,      ,     :


   :

       , , 28x28.

         .          ,       ,     .     ,      .

      ,  ,    .         ,         .

       ,         0  9.          .


  :

         .       ,      .

      ,    .  ,                  .

 ,      ,           .     ,  


 :

    ,    , ,    . .

          ,    . ,       ,         .

        , ,       .


  :

    ,      .

            . ,   ,   , ,     .

       ,        .



    ,        .              ,           . ,        ,   ,    ,            .

,    ,               .     .

TensorFlow:       ,   Google. TensorFlow         ,      .

Keras:       ,    TensorFlow. Keras             .

PyTorch:       ,   Facebook. PyTorch               .

Scikit-learn:        Python. Scikit-learn       ,     ,        .



            . ,         ,        .



          TensorFlow.


     . 

                    TensorFlow.       .

      TensorFlow       :



import tensorflow as tf

from tensorflow import keras

# Load the MNIST handwritten-digit dataset (28x28 grayscale images of the
# digits 0-9; 60,000 training and 10,000 test samples).  Downloads on first use.

(train_images, train_labels), (test_images, test_labels) = keras.datasets.mnist.load_data()



      ,     ,   .



# Add a trailing channel axis (grayscale => 1 channel) and rescale the raw
# pixel values from the 0..255 byte range into [0.0, 1.0].
train_images = train_images.reshape(60000, 28, 28, 1) / 255.0
test_images = test_images.reshape(10000, 28, 28, 1) / 255.0



      .            ,        (max pooling),    .      10 ,   ,    softmax.



# Convolutional classifier for 28x28x1 digit images: three convolution
# stages (32, 64, 64 filters) with 2x2 max-pooling between the first two,
# then a flatten and a small dense head ending in a 10-way softmax.
model = keras.Sequential()
model.add(keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(keras.layers.MaxPooling2D((2, 2)))
model.add(keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(keras.layers.MaxPooling2D((2, 2)))
model.add(keras.layers.Conv2D(64, (3, 3), activation='relu'))
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(64, activation='relu'))
model.add(keras.layers.Dense(10, activation='softmax'))



    ,   ,       .

# Adam optimizer; sparse categorical cross-entropy matches the integer
# (non-one-hot) MNIST labels; report classification accuracy during training.
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)



      ,             ()    ( ,    ).

# Train for 5 epochs in mini-batches of 64, evaluating on the test set
# after every epoch so progress on unseen data is visible.
model.fit(
    train_images,
    train_labels,
    epochs=5,
    batch_size=64,
    validation_data=(test_images, test_labels),
)



,        .



# Measure final loss and accuracy on the held-out test set.
test_loss, test_acc = model.evaluate(test_images, test_labels)
# FIX: the original line was  print('Test accuracy)  — an unterminated
# string literal (SyntaxError) that also never printed the accuracy value.
print('Test accuracy:', test_acc)

         ,           ,     .

         ,        CIFAR-10.           ,       .                   .

            (..  )       .       ,         .

           , ,         ,       ,     .


2.     . 

      TensorFlow     CIFAR-10,        TensorFlow.

 CIFAR-10  60000    32x32 ,   10 .       50000 ,      10000.

       TensorFlow:



import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

# Spoken-digit recognition demo: an LSTM over per-frame MFCC features with a
# 10-way softmax over the words "one" ... "zero".
# NOTE(review): this is illustrative book code; several tf.signal calls below
# do not match the documented TensorFlow API (flagged inline) — confirm
# against the TF docs before running.

# Model: LSTM over variable-length sequences of 13-dim feature vectors,
# two ReLU hidden layers, softmax output over the 10 classes.
model = keras.Sequential(
    [
        layers.LSTM(128, input_shape=(None, 13)),
        layers.Dense(64, activation="relu"),
        layers.Dense(32, activation="relu"),
        layers.Dense(10, activation="softmax"),
    ]
)

# Compile with Adam (learning rate 0.001).
# NOTE(review): CategoricalCrossentropy expects one-hot targets, but the
# `one_hot` text helper used below produces integer indices —
# SparseCategoricalCrossentropy would match that encoding; confirm.
model.compile(
    optimizer=keras.optimizers.Adam(learning_rate=0.001),
    loss=keras.losses.CategoricalCrossentropy(),
    metrics=["accuracy"],
)

# Load a WAV file, drop the channel axis, and cast samples to float32.
audio_file = tf.io.read_file("audio.wav")
audio, _ = tf.audio.decode_wav(audio_file)
audio = tf.squeeze(audio, axis=-1)
audio = tf.cast(audio, tf.float32)

# Split the signal into frames of 640 samples with a hop of 320 samples.
frame_length = 640
frame_step = 320
audio_length = tf.shape(audio)[0]
num_frames = tf.cast(tf.math.ceil(audio_length / frame_step), tf.int32)
# Pad the signal out to a whole number of hops.
# FIX: the subtraction operator was lost in the original
# ("num_frames * frame_step  audio_length"), which is a syntax error.
padding_length = num_frames * frame_step - audio_length
audio = tf.pad(audio, [[0, padding_length]])
# NOTE(review): the padded length is num_frames * frame_step, yet this
# reshape needs num_frames * frame_length elements; overlapping frames
# (frame_length != frame_step) require tf.signal.frame instead — confirm.
audio = tf.reshape(audio, [num_frames, frame_length])

# Compute 13 MFCC (Mel-frequency cepstral) coefficients per frame.
# NOTE(review): tf.signal.stft requires frame_length/frame_step arguments,
# and mfccs_from_log_mel_spectrograms takes only the log-mel spectrogram —
# the extra positional/keyword arguments here do not match the documented
# signature; verify against the tf.signal reference.
mfccs = tf.signal.mfccs_from_log_mel_spectrograms(
    tf.math.log(tf.abs(tf.signal.stft(audio))),
    audio.shape[-1],
    num_mel_bins=13,
    dct_coefficient_count=13,
)

# Label vocabulary plus lookup tables in both directions.
labels = ["one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "zero"]
label_to_index = dict(zip(labels, range(len(labels))))
index_to_label = dict(zip(range(len(labels)), labels))
text = "one two three four five six seven eight nine zero"
# NOTE(review): keras' text one_hot uses a hashing trick, so these indices
# will not agree with label_to_index above — verify the target encoding.
target = tf.keras.preprocessing.text.one_hot(text, len(labels))

# Add a leading batch axis of size 1.
# FIX: `target` is a plain Python list, which cannot be indexed with
# `[None, ]`; convert it to a tensor first.
X_train = mfccs[None, ]
y_train = tf.constant(target)[None, ]

# Train.
history = model.fit(X_train, y_train, epochs=10)

# Predict and map the winning class indices back to label strings.
predicted_probs = model.predict(X_train)
predicted_indexes = tf.argmax(predicted_probs, axis=-1)[0]
# FIX: convert each tensor element to a Python int before the dict lookup;
# raw tensor elements are not keys of index_to_label.
predicted_labels = [index_to_label[int(i)] for i in predicted_indexes]

# Report the result.
print("Predicted labels:", predicted_labels)



           TensorFlow  Keras.        Keras Sequential API.       LSTM,        13.         relu        softmax,      .

      compile.   Adam    0.001,     -,       .

      wav,     tf.audio.decode_wav      float32.        640   320.       ,   .

    MFCC (Mel-frequency cepstral coefficients)        tf.signal.mfccs_from_log_mel_spectrograms.      .

     .                .      one-hot     tf.keras.preprocessing.text.one_hot.            fit.

            predict.         .

     .


3.    .

 ,     :

 1:  

        .       , ,  , ,    . .       ,       .

 2:  

 ,   ,   . ,    ,       .      , ,    .

 3:  

 ,   ,     .          , ,       .        .

 4:  

  ,    ,  ,    .       .       ,     .

 5:  

 ,    ,       . ,   ,      ,      .   ,      ,  ,    .

        ,        ,        .            ,        :

import numpy as np

# Toy collaborative-filtering demo: factorize a small user-item rating
# matrix with per-rating SGD, then recommend items for one user.
# A rating of 0 means "not rated" and is skipped during training.
ratings = np.array([
    [5, 3, 0, 1],
    [4, 0, 0, 1],
    [1, 1, 0, 5],
    [1, 0, 0, 4],
    [0, 1, 5, 4],
])

# Hyperparameters.
num_users, num_items = ratings.shape
num_factors = 2          # latent dimensions per user / item
learning_rate = 0.01
num_epochs = 1000

# Fixed seed so the demo produces the same factors on every run.
np.random.seed(42)

# Random initial latent-factor matrices: users are rows, items are columns.
user_matrix = np.random.rand(num_users, num_factors)
item_matrix = np.random.rand(num_factors, num_items)

# SGD over every observed (non-zero) rating.
# FIX: the original "ratings[i][j]  np.dot(...)" had lost its minus sign
# (a syntax error); the error term is rating minus current prediction.
# The loop-body indentation, also lost in the original, is restored.
for epoch in range(num_epochs):
    for i in range(num_users):
        for j in range(num_items):
            if ratings[i][j] > 0:
                error = ratings[i][j] - np.dot(user_matrix[i, :], item_matrix[:, j])
                # Note: the item update deliberately uses the just-updated
                # user row, matching the original statement order.
                user_matrix[i, :] += learning_rate * (error * item_matrix[:, j])
                item_matrix[:, j] += learning_rate * (error * user_matrix[i, :])

# Reconstruct the dense matrix: predicted rating = user factors . item factors.
predicted_ratings = np.dot(user_matrix, item_matrix)

# Item indices for user 0, best predicted rating first.
user_id = 0
recommended_items = np.argsort(predicted_ratings[user_id])[::-1]
# FIX: the original print string was destroyed by the encoding loss;
# restored to a meaningful message.
print("Recommended items for user", user_id)
print(recommended_items)

          .                  .     ,        ,           .            .


4.     .

 .

    TensorFlow.

 ,    .         48x48x1 .  Conv2D, BatchNormalization  MaxPooling2D      .  Flatten      .  Dense, BatchNormalization  Dropout      7  (, ,   ..).

 ,  ,    .

         .

      .

       .



import tensorflow as tf

from tensorflow import keras

from tensorflow.keras import layers



# Emotion-recognition CNN for 48x48x1 grayscale face images, built
# incrementally with .add(): three Conv/BatchNorm/MaxPool/Dropout stages
# widening 32 -> 64 -> 128 filters, then a dense head with a 7-way softmax.
# NOTE(review): train_data/train_labels, val_data/val_labels,
# test_data/test_labels and new_data are assumed to be defined elsewhere
# in the surrounding text — confirm before running.
model = keras.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(48, 48, 1)))
model.add(layers.BatchNormalization())
model.add(layers.MaxPooling2D(pool_size=(2, 2)))
model.add(layers.Dropout(0.25))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.MaxPooling2D(pool_size=(2, 2)))
model.add(layers.Dropout(0.25))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.MaxPooling2D(pool_size=(2, 2)))
model.add(layers.Dropout(0.25))
model.add(layers.Flatten())
model.add(layers.Dense(256, activation='relu'))
model.add(layers.BatchNormalization())
model.add(layers.Dropout(0.5))
model.add(layers.Dense(7, activation='softmax'))

# Adam + categorical cross-entropy (expects one-hot labels), tracking accuracy.
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Train for 50 epochs, validating after each epoch.
history = model.fit(train_data,
                    train_labels,
                    epochs=50,
                    validation_data=(val_data, val_labels))

# Final evaluation on the held-out test set.
test_loss, test_acc = model.evaluate(test_data, test_labels)
print('Test accuracy:', test_acc)

# Class-probability predictions for new, unseen images.
predictions = model.predict(new_data)



            48x48 .

      32   3x3    ReLU,      48x48x1.     ,      2x2  dropout,    .

               dropout.     ,       .

        ReLU    ,    dropout.    7      softmax      7 .

     adam,   categorical_crossentropy   accuracy.        50      .

          .         .


  1 .

      ,     .  ,   ,     ,     ,        .        ,       ,    .

,   ,             ,     .    ,    ,      .     ,   ,       .

             TensorFlow   Keras.     ,  ,    .  ,          .

,   ,          ,        ,     .   ,          .

              ,       ,        .




 2.  


               ,     ,     .            ,  ,          ,     .

   ?

      ,    ,  ,     ..               .        ,       .  ,     ,        .  ,         ,             .



       .               .      :

Backpropagation (  )        .           (forward pass)    ,      (backward pass)       . Backpropagation             .



     ,    .      ,        .      ,       .

   (Stochastic Gradient Descent, SGD)   ,        .       ,     . SGD         .

       ,     .     ,      .        ,        .



       ?        ,    . ,     ,     ,           .      ,    , ,   ,       backpropagation    .       ,    .



  (. loss function)    ,         .          ,     ,    .        ,     ,    .

       . ,         - (categorical cross-entropy loss),      ,  ,    .   ,     ,      (mean squared error loss).

              .    ,   (   ),    . ,     ,          .         ,     , , Huber loss.


     :

 :        .         ,      .         .

  :      ,   .          ,        .

 :       .        ,  .

 :            .       ,        .         ,     .

  :           .     ,  ,     .

 :  ,        ,     ,    .   ,      .

 :    ,    .        ,    ,       .



 ,      ,    :

#  

# Split the data 80% train / 20% test with a fixed seed for reproducibility.
# NOTE(review): X, y and X_new_data are assumed to be defined elsewhere.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Regression network: two ReLU hidden layers (32, 16) and a single linear
# output unit for an unbounded continuous prediction.
from keras.models import Sequential
from keras.layers import Dense
model = Sequential([
    Dense(32, activation='relu', input_dim=X_train.shape[1]),
    Dense(16, activation='relu'),
    Dense(1, activation='linear'),
])

# Mean squared error is the standard regression loss; Adam optimizer.
model.compile(loss='mean_squared_error', optimizer='adam')

# Train for 100 epochs in mini-batches of 32.
model.fit(X_train, y_train, epochs=100, batch_size=32)

# Score the model on the held-out test split.
score = model.evaluate(X_test, y_test)

# Predict on new, unseen samples.
y_pred = model.predict(X_new_data)

# A deeper variant of the same network (64 -> 32 -> 16 -> 1), trained
# longer and with larger batches; this rebinds `model`.
model = Sequential([
    Dense(64, activation='relu', input_dim=X_train.shape[1]),
    Dense(32, activation='relu'),
    Dense(16, activation='relu'),
    Dense(1, activation='linear'),
])
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(X_train, y_train, epochs=200, batch_size=64)



  :

    train_test_split   Scikit-Learn        .

       Sequential   Keras.         32  16  ,     .        ReLU,      linear (   ).        .

   ,    "mean squared error" ( )    "adam".

      ,   fit,    (100)    (batch_size=32).

            evaluate      score.

             X_new_data.



            .       Python,   TensorFlow:



#  

# Imports.
import tensorflow as tf
import numpy as np
from sklearn.model_selection import train_test_split

# The XOR truth table as training data.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
# NOTE(review): with only 4 samples this split leaves a single test row and
# removes one XOR case from training — acceptable as a demo, not methodology.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

# Tiny network: one 4-unit ReLU hidden layer, sigmoid output for a
# binary decision.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(4, input_shape=(2,), activation='relu'))
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))

# Binary cross-entropy with Adam, tracking accuracy.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train long enough for this tiny problem to converge.
history = model.fit(X_train, y_train, epochs=1000, validation_data=(X_test, y_test))

# Evaluate on the held-out sample(s).
loss, accuracy = model.evaluate(X_test, y_test)
print('Loss:', loss)
print('Accuracy:', accuracy)

# Predict over the full truth table.
X_new = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_pred = model.predict(X_new)
print('Predictions:', y_pred)




  .


   .

   ,     (https://www.litres.ru/book/dzheyd-karter/neyroseti-nachalo-69188950/chitat-onlayn/)  .

      Visa, MasterCard, Maestro,    ,   ,     ,  PayPal, WebMoney, ., QIWI ,       .


