|
431 | 431 | "print(classification_report(y_test, predictions))\n", |
432 | 432 | "print (\"Confusion Matrix:\\n\",cm)\n", |
433 | 433 | "\n", |
| 434 | + "auc = roc_auc_score(y_test, model.predict_proba(X_test)[:,1])\n", |
| 435 | + "print('\\nAUC: ' + str(auc))\n", |
| 436 | + "\n", |
434 | 437 | "# calculate ROC curve\n", |
435 | 438 | "y_scores = model.predict_proba(X_test)\n", |
436 | 439 | "fpr, tpr, thresholds = roc_curve(y_test, y_scores[:,1])\n", |
|
444 | 447 | "plt.xlabel('False Positive Rate')\n", |
445 | 448 | "plt.ylabel('True Positive Rate')\n", |
446 | 449 | "plt.title('ROC Curve')\n", |
447 | | - "plt.show()" |
| 450 | + "plt.show()\n", |
| 451 | + "\n" |
448 | 452 | ] |
449 | 453 | }, |
450 | 454 | { |
|
495 | 499 | "print('Accuracy: ', accuracy_score(y_test, predictions))\n", |
496 | 500 | "print(classification_report(y_test, predictions))\n", |
497 | 501 | "print (\"Confusion Matrix:\\n\",cm)\n", |
| 502 | + "auc = roc_auc_score(y_test, model.predict_proba(X_test)[:,1])\n", |
| 503 | + "print('\\nAUC: ' + str(auc))\n", |
498 | 504 | "\n", |
499 | 505 | "# calculate ROC curve\n", |
500 | 506 | "y_scores = model.predict_proba(X_test)\n", |
|
0 commit comments