How to reduce dimensionality using PCA in Python?

This recipe helps you reduce dimensionality using PCA in Python, both by retaining a target fraction of the variance and by fixing the number of components.
In [1]:
## How to reduce dimensionality using PCA in Python
def Snippet_123():
    print()
    print(format('How to reduce dimensionality using PCA in Python','*^82'))

    import warnings
    warnings.filterwarnings("ignore")

    # load libraries
    from sklearn.preprocessing import StandardScaler
    from sklearn.decomposition import PCA
    from sklearn import datasets

    # Load the digits dataset
    digits = datasets.load_digits()

    # Standardize the feature matrix
    X = StandardScaler().fit_transform(digits.data)
    print(); print(X)

    # Conduct Principal Component Analysis
    # Create a PCA that will retain 85% of the variance
    pca = PCA(n_components=0.85, whiten=True)
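    # (a float n_components in (0, 1) asks scikit-learn to keep just
    #  enough components to explain that fraction of the variance)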

    # Conduct PCA
    X_pca = pca.fit_transform(X)
    print(); print(X_pca)

    # Show results
    print('Original number of features:', X.shape[1])
    print('Reduced number of features:', X_pca.shape[1])

    # Create a PCA with 2 components
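    # (an integer n_components keeps exactly that many components,
    #  e.g. two components for a 2-D visualization)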
    pca = PCA(n_components=2, whiten=True)
    # Conduct PCA
    X_pca = pca.fit_transform(X)
    print(); print(X_pca)
    # Show results
    print('Original number of features:', X.shape[1])
    print('Reduced number of features:', X_pca.shape[1])

Snippet_123()
*****************How to reduce dimensionality using PCA in Python*****************

[[ 0.         -0.33501649 -0.04308102 ... -1.14664746 -0.5056698
  -0.19600752]
 [ 0.         -0.33501649 -1.09493684 ...  0.54856067 -0.5056698
  -0.19600752]
 [ 0.         -0.33501649 -1.09493684 ...  1.56568555  1.6951369
  -0.19600752]
 ...
 [ 0.         -0.33501649 -0.88456568 ... -0.12952258 -0.5056698
  -0.19600752]
 [ 0.         -0.33501649 -0.67419451 ...  0.8876023  -0.5056698
  -0.19600752]
 [ 0.         -0.33501649  1.00877481 ...  0.8876023  -0.26113572
  -0.19600752]]

[[ 0.70631939 -0.39512814 -1.73816236 ...  0.60320435 -0.94455291
  -0.60204272]
 [ 0.21732591  0.38276482  1.72878893 ... -0.56722002  0.61131544
   1.02457999]
 [ 0.4804351  -0.13130437  1.33172761 ... -1.51284419 -0.48470912
  -0.52826811]
 ...
 [ 0.37732433 -0.0612296   1.0879821  ...  0.04925597  0.29271531
  -0.33891255]
 [ 0.39705007 -0.15768102 -1.08160094 ...  1.31785641  0.38883981
  -1.21854835]
 [-0.46407544 -0.92213976  0.12493334 ... -1.27242756 -0.34190284
  -1.17852306]]
Original number of features: 64
Reduced number of features: 25

[[ 0.70632396 -0.3951369 ]
 [ 0.21732429  0.38276531]
 [ 0.48042968 -0.1313031 ]
 ...
 [ 0.37732239 -0.06123449]
 [ 0.3970504  -0.15768443]
 [-0.46407124 -0.92214378]]
Original number of features: 64
Reduced number of features: 2
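
As a quick sanity check, a fitted PCA object exposes explained_variance_ratio_, which reports each kept component's share of the total variance, and n_components_, the number of components actually retained. The cell below is a minimal, self-contained sketch (it reloads and standardizes the digits data the same way as above) confirming that the kept components together explain at least the 85% we asked for:

In [2]:
# A minimal sketch: verify how much variance the fitted PCA retains.
# Assumes the same standardized digits data as in the recipe above.
from sklearn import datasets
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler

X = StandardScaler().fit_transform(datasets.load_digits().data)

# Keep enough components to explain at least 85% of the variance
pca = PCA(n_components=0.85, whiten=True)
X_pca = pca.fit_transform(X)

# n_components_ is the number of components actually kept;
# explained_variance_ratio_.sum() should be at least 0.85
print('Components kept:', pca.n_components_)
print('Variance retained:', pca.explained_variance_ratio_.sum())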