{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<Figure size 432x288 with 0 Axes>"
      ]
     },
     "execution_count": 0,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# <editor-fold desc=\"import libraries\">\n",
    "import copy\n",
    "from IPython.display import display, HTML\n",
    "from pprint import pprint\n",
    "from joblib import dump, load\n",
    "import tensorflow as tf\n",
    "from keras.backend.tensorflow_backend import set_session\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from sklearn.model_selection import train_test_split, GridSearchCV, cross_validate, cross_val_score\n",
    "from sklearn.pipeline import make_pipeline\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from sklearn.metrics import auc\n",
    "from sklearn.feature_selection import SelectFromModel\n",
  35. "config = tf.ConfigProto()\n",
  36. "config.gpu_options.allow_growth = True\n",
  37. "set_session(tf.Session(config=config))\n",
  38. "import pickle\n",
  39. "from keras.layers import Dense, Conv2D, BatchNormalization, Activation\n",
  40. "from keras.layers import AveragePooling2D, Input, Flatten\n",
  41. "from keras.optimizers import Adam\n",
  42. "from keras.callbacks import ModelCheckpoint, LearningRateScheduler\n",
  43. "from keras.callbacks import ReduceLROnPlateau\n",
  44. "from keras.preprocessing.image import ImageDataGenerator\n",
  45. "from keras.regularizers import l2\n",
  46. "from keras.models import Model\n",
  47. "import warnings\n",
  48. "warnings.simplefilter(action='ignore', category=FutureWarning)\n",
  49. "import time\n",
  50. "start_time = time.time()\n",
  51. "import numpy as np\n",
  52. "\n",
  53. "from keras.callbacks import Callback\n",
  54. "from keras import Sequential, metrics\n",
  55. "from idlelib import history\n",
  56. "from keras.utils import np_utils\n",
  57. "from keras.utils.vis_utils import plot_model\n",
  58. "from keras.datasets import mnist\n",
  59. "from keras.preprocessing.image import ImageDataGenerator\n",
  60. "from keras import initializers\n",
  61. "import keras\n",
  62. "from keras.layers import Dense, Conv2D, MaxPooling2D, Dropout, Flatten\n",
  63. "from keras import backend as K\n",
  64. "K.set_image_dim_ordering('th')\n",
  65. "from keras.models import load_model\n",
  66. "\n",
  67. "from statsmodels.tsa.api import ExponentialSmoothing, SimpleExpSmoothing, Holt\n",
  68. "import matplotlib.pyplot as plt\n",
  69. "np.set_printoptions(threshold=np.inf)\n",
  70. "import h5py\n",
  71. "import scipy.misc\n",
  72. "from sklearn.metrics import confusion_matrix, accuracy_score, make_scorer, average_precision_score, f1_score, \\\n",
  73. " precision_score, recall_score, precision_recall_curve\n",
  74. "import os,sys\n",
  75. "import pandas as pd\n",
  76. "# import seaborn as sns\n",
  77. "# sns.set_style(\"darkgrid\")\n",
  78. "from imblearn.under_sampling import RandomUnderSampler\n",
  79. "plt.gray()\n",
  80. "# </editor-fold>"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CWD = C:\\Users\\302566153\\PycharmProjects\\dan\\fd/\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Time V1 V2 V3 V4 V5 V6 V7 \\\n0 0.0 -1.359807 -0.072781 2.536347 1.378155 -0.338321 0.462388 0.239599 \n1 0.0 1.191857 0.266151 0.166480 0.448154 0.060018 -0.082361 -0.078803 \n2 0.0 -1.358354 -1.340163 1.773209 0.379780 -0.503198 1.800499 0.791461 \n3 0.0 -0.966272 -0.185226 1.792993 -0.863291 -0.010309 1.247203 0.237609 \n4 0.0 -1.158233 0.877737 1.548718 0.403034 -0.407193 0.095921 0.592941 \n\n V8 V9 ... V21 V22 V23 V24 V25 \\\n0 0.098698 0.363787 ... -0.018307 0.277838 -0.110474 0.066928 0.128539 \n1 0.085102 -0.255425 ... -0.225775 -0.638672 0.101288 -0.339846 0.167170 \n2 0.247676 -1.514654 ... 0.247998 0.771679 0.909412 -0.689281 -0.327642 \n3 0.377436 -1.387024 ... -0.108300 0.005274 -0.190321 -1.175575 0.647376 \n4 -0.270533 0.817739 ... -0.009431 0.798278 -0.137458 0.141267 -0.206010 \n\n V26 V27 V28 Amount Class \n0 -0.189115 0.133558 -0.021053 0.244964 0 \n1 0.125895 -0.008983 0.014724 -0.342475 0 \n2 -0.139097 -0.055353 -0.059752 1.160686 0 \n3 -0.221929 0.062723 0.061458 0.140534 0 \n4 0.502292 0.219422 0.215153 -0.073403 0 \n\n[5 rows x 31 columns]\n"
     ]
    }
   ],
   "source": [
  104. "CWD=(os.getcwd()+'/')\n",
  105. "CWD=r'C:\\Users\\302566153\\PycharmProjects\\dan\\fd/'\n",
  106. "print('CWD = ',CWD)\n",
  107. "df = pd.read_csv(CWD + \"creditcard.csv\")\n",
  108. "df.Time=np.floor(df.Time/3600)\n",
  109. "df['Amount'] = StandardScaler().fit_transform(df['Amount'].values.reshape((-1,1)))\n",
  110. "\n",
  111. "\n",
  112. "# print(df.dtypes)\n",
  113. "display((df.head()))\n",
  114. "# print (df.head().to_html())\n",
  115. "# print(df.columns)\n",
  116. "# print(df.columns.values)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}