@simplymathematics
Created October 18, 2024 14:32
NCD-Kernelized-Classifier
{
"cells": [
{
"cell_type": "code",
"execution_count": 116,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.datasets import fetch_20newsgroups\n",
"from sklearn.svm import SVC\n",
"import pandas as pd\n",
"import numpy as np\n",
"import seaborn as sns\n",
"import matplotlib.pyplot as plt\n",
"from sklearn.model_selection import train_test_split\n",
"from sklearn.metrics import accuracy_score\n",
"from sklearn.metrics.pairwise import rbf_kernel, linear_kernel, polynomial_kernel\n",
"from sklearn.linear_model import LogisticRegression\n",
"from sklearn.neighbors import KNeighborsClassifier\n",
"\n",
"from gzip_classifier import ncd"
]
},
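{
"cell_type": "markdown",
"metadata": {},
"source": [
"`ncd` comes from the local `gzip_classifier` module, which is not included in this gist. Below is a minimal fallback sketch of a gzip-based normalized compression distance (NCD); the actual implementation in `gzip_classifier` may differ, so treat this only as a stand-in if the module is unavailable."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Fallback sketch: gzip-based NCD, used only if `gzip_classifier` is unavailable.\n",
"# NCD(x, y) = (C(xy) - min(C(x), C(y))) / max(C(x), C(y)), where C(.) is the compressed length.\n",
"import gzip\n",
"\n",
"try:\n",
"    from gzip_classifier import ncd  # prefer the module's implementation\n",
"except ImportError:\n",
"    def ncd(x, y):\n",
"        cx = len(gzip.compress(str(x).encode()))\n",
"        cy = len(gzip.compress(str(y).encode()))\n",
"        cxy = len(gzip.compress((str(x) + str(y)).encode()))\n",
"        return (cxy - min(cx, cy)) / max(cx, cy)"
]
},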
{
"cell_type": "code",
"execution_count": 81,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['alt.atheism',\n",
" 'comp.graphics',\n",
" 'comp.os.ms-windows.misc',\n",
" 'comp.sys.ibm.pc.hardware',\n",
" 'comp.sys.mac.hardware',\n",
" 'comp.windows.x',\n",
" 'misc.forsale',\n",
" 'rec.autos',\n",
" 'rec.motorcycles',\n",
" 'rec.sport.baseball',\n",
" 'rec.sport.hockey',\n",
" 'sci.crypt',\n",
" 'sci.electronics',\n",
" 'sci.med',\n",
" 'sci.space',\n",
" 'soc.religion.christian',\n",
" 'talk.politics.guns',\n",
" 'talk.politics.mideast',\n",
" 'talk.politics.misc',\n",
" 'talk.religion.misc']"
]
},
"execution_count": 81,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"newgroups = fetch_20newsgroups(subset='all')\n",
"\n",
"newgroups.target_names"
]
},
{
"cell_type": "code",
"execution_count": 156,
"metadata": {},
"outputs": [],
"source": [
"# Load the data\n",
"# ddos = pd.read_csv(\"raw_data/ddos_undersampled_10000.csv\")\n",
"# y = ddos.pop(\"Label\")\n",
"# X = ddos\n",
"\n",
"\n",
"# select only 2 categories to speed up the computation\n",
"remove = ('headers', 'footers')\n",
"categories = ['rec.sport.baseball', 'comp.sys.mac.hardware']\n",
"newsgroups = fetch_20newsgroups(subset='all', remove=remove, categories=categories)\n",
"X = pd.DataFrame(newsgroups.data)\n",
"y = newsgroups.target\n",
"\n",
"\n",
"\n",
"# Split the data\n",
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=100, train_size=200)\n",
"# Turn each row of X_train, X_test into a string\n",
"X_train = X_train.apply(lambda x: str(x), axis=1)\n",
"X_test = X_test.apply(lambda x: str(x), axis=1)\n",
"# Turn into numpy arrays\n",
"X_train = X_train.to_numpy()\n",
"X_test = X_test.to_numpy()\n",
"# Compute the NCD matrices\n",
"D_train = np.zeros((len(X_train), len(X_train)))\n",
"D_test = np.zeros((len(X_test), len(X_train)))\n",
"\n",
"for i in range(len(X_train)):\n",
" for j in range(len(X_train)):\n",
" D_train[i, j] = ncd(X_train[i], X_train[j])\n",
"\n",
"for i in range(len(X_test)):\n",
" for j in range(len(X_train)):\n",
" D_test[i, j] = ncd(X_test[i], X_train[j])\n",
"\n",
"X_train = D_train\n",
"X_test = D_test\n",
"\n",
"# Linear kernel\n",
"X_train_linear = linear_kernel(X_train)\n",
"X_test_linear = linear_kernel(X_test, X_train)\n",
"X_train_self_linear = X_train @ X_train.T\n",
"X_test_self_linear = X_test @ X_train.T\n",
"assert np.allclose(X_train_linear, X_train_self_linear), \"Linear kernel not equal\"\n",
"assert np.allclose(X_test_linear, X_test_self_linear), \"Linear kernel not equal\"\n",
"\n",
"# RBF kernel\n",
"X_train_rbf = rbf_kernel(X_train, gamma=0.5)\n",
"X_test_rbf = rbf_kernel(X_test, X_train, gamma=0.5)\n",
"X_train_rbf_self = np.exp(-.5 * np.linalg.norm(X_train[:, None] - X_train[None, :], axis=2) ** 2)\n",
"X_test_rbf_self = np.exp(-.5 * np.linalg.norm(X_test[:, None] - X_train[None, :], axis=2) ** 2)\n",
"assert np.allclose(X_train_rbf, X_train_rbf_self), \"RBF kernel not equal\"\n",
"assert np.allclose(X_test_rbf, X_test_rbf_self), \"RBF kernel not equal\"\n",
"\n",
"# Polynomial kernel\n",
"X_train_poly = polynomial_kernel(X_train, X_train, degree=3, coef0=0.5, gamma=1)\n",
"X_test_poly = polynomial_kernel(X_test, X_train, degree=3, coef0=0.5, gamma = 1)\n",
"X_train_poly_self = (X_train @ X_train.T + 0.5) ** 3\n",
"X_test_poly_self = (X_test @ X_train.T + 0.5) ** 3\n",
"assert np.allclose(X_train_poly, X_train_poly_self), \" kernel not equal\"\n",
"assert np.allclose(X_test_poly, X_test_poly_self), \"Polynomial kernel not equal\""
]
},
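{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, the three kernels verified above are (in scikit-learn's parameterization):\n",
"\n",
"- Linear: $k(x, y) = x^\\top y$\n",
"- RBF: $k(x, y) = \\exp(-\\gamma \\lVert x - y \\rVert^2)$, here with $\\gamma = 0.5$\n",
"- Polynomial: $k(x, y) = (\\gamma\\, x^\\top y + c_0)^d$, here with $\\gamma = 1$, $c_0 = 0.5$, $d = 3$\n",
"\n",
"Each kernel is applied to rows of the NCD matrix, so every document is represented by its vector of NCD values against the training documents before kernelization."
]
},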
{
"cell_type": "code",
"execution_count": 157,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy when using `precomputed` option: 0.61\n",
"Accuracy for rbf SVM: 0.61\n",
"Accuracy when using self-implemented rbf kernel 0.61\n",
"********************************************************************************\n",
"Accuracy when using \"precomputed\" option 0.61\n",
"Accuracy for self implemented linear kernel svc: 0.61\n",
"Accuracy for linear kernel svc: 0.61\n",
"********************************************************************************\n",
"Accuracy when using \"precomputed\" option 0.66\n",
"Accuracy for polynomial kernel svc: 0.66\n",
"Accuracy when using self-implemented polynomial kernel 0.66\n"
]
}
],
"source": [
"# kernelize the data using the RBF kernel\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_rbf, y_train)\n",
"y_pred = model.predict(X_test_rbf)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy when using `precomputed` option: {accuracy}')\n",
"\n",
"# Train the kernelized SVM with RBF\n",
"model = SVC(kernel='rbf', gamma=.5)\n",
"model.fit(X_train, y_train)\n",
"# Predict the test data\n",
"y_pred = model.predict(X_test)\n",
"# Calculate the accuracy\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy for rbf SVM: {accuracy}')\n",
"\n",
"\n",
"\n",
"\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_rbf_self, y_train)\n",
"y_pred = model.predict(X_test_rbf_self)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy when using self-implemented rbf kernel {accuracy}')\n",
"print(\"*\"*80)\n",
"\n",
"\n",
"# Compare the linear SVM with the kernelized SVM using the linear kernel\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_linear, y_train)\n",
"y_pred = model.predict(X_test_linear)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy when using \"precomputed\" option {accuracy}')\n",
"\n",
"\n",
"model = SVC(kernel='linear')\n",
"model.fit(X_train, y_train)\n",
"y_pred = model.predict(X_test)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy for self implemented linear kernel svc: {accuracy}')\n",
"\n",
"model = SVC(kernel='linear')\n",
"model.fit(X_train, y_train)\n",
"y_pred = model.predict(X_test)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy for linear kernel svc: {accuracy}')\n",
"print(\"*\"*80)\n",
"\n",
"# Compare the polynomial kernel with precompute vs kernel='poly'\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_poly, y_train)\n",
"y_pred = model.predict(X_test_poly)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy when using \"precomputed\" option {accuracy}')\n",
"\n",
"model = SVC(kernel='poly', degree=3, gamma=0.5)\n",
"model.fit(X_train, y_train)\n",
"y_pred = model.predict(X_test)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy for polynomial kernel svc: {accuracy}')\n",
"\n",
"\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_poly_self, y_train)\n",
"y_pred = model.predict(X_test_poly_self)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy when using self-implemented polynomial kernel {accuracy}')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"As we can see above, precomputing the kernel using the functions provided in pairwise metrics is identical to using the kernelized SVM (i.e. changing the kernel kwarg in SVC). \n",
"However, using the precomputed mnethod, we can apply this same transformation and use it in other models."
]
},
{
"cell_type": "code",
"execution_count": 158,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy on distance matrix: 0.61\n",
"Accuracy after rbf kernel: 0.62\n",
"Accuracy after linear kernel: 0.6\n"
]
}
],
"source": [
"# Logistic Regression using the distance data\n",
"model = LogisticRegression()\n",
"model.fit(X_train, y_train)\n",
"y_pred = model.predict(X_test)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy on distance matrix: {accuracy}')\n",
"# Logistic Regression using the RBF kernel\n",
"model = LogisticRegression()\n",
"model.fit(X_train_rbf, y_train)\n",
"y_pred = model.predict(X_test_rbf)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy after rbf kernel: {accuracy}')\n",
"# Logistic Regression using the linear kernel\n",
"model = LogisticRegression()\n",
"model.fit(X_train_linear, y_train)\n",
"y_pred = model.predict(X_test_linear)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy after linear kernel: {accuracy}')\n"
]
},
{
"cell_type": "code",
"execution_count": 159,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Accuracy on distance matrix: 0.52\n",
"Accuracy after rbf linear: 0.59\n",
"Accuracy after linear kernel: 0.6\n"
]
}
],
"source": [
"# KNN using the distance matrix\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train, y_train)\n",
"y_pred = model.predict(X_test)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy on distance matrix: {accuracy}')\n",
"# KNN using the RBF kernel\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_rbf, y_train)\n",
"y_pred = model.predict(X_test_rbf)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy after rbf linear: {accuracy}')\n",
"# KNN using the linear kernel\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_linear, y_train)\n",
"y_pred = model.predict(X_test_linear)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy after linear kernel: {accuracy}')\n"
]
},
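{
"cell_type": "markdown",
"metadata": {},
"source": [
"Since `X_train` and `X_test` are already NCD matrices (train-by-train and test-by-train), KNN can also consume them directly as pairwise distances via `metric='precomputed'`, instead of treating each row as a feature vector. A sketch of that variant is below; it was not run above, so no accuracy is reported for it here."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# KNN on the NCD matrix interpreted as precomputed pairwise distances:\n",
"# fit() takes an (n_train, n_train) distance matrix and predict() an\n",
"# (n_test, n_train) matrix, which is exactly the shape of D_train / D_test.\n",
"model = KNeighborsClassifier(n_neighbors=5, metric='precomputed')\n",
"model.fit(X_train, y_train)\n",
"y_pred = model.predict(X_test)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Accuracy with precomputed NCD distances: {accuracy}')"
]
},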
{
"cell_type": "code",
"execution_count": 160,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"********************************************************************************\n",
"Using absolute value transform\n",
"SVC Accuracy after absolute value transform: 0.37\n",
"Logistic Regression Accuracy after absolute value transform: 0.61\n",
"KNN Accuracy after absolute value transform: 0.52\n",
"********************************************************************************\n",
"Using exp transform\n",
"SVC Accuracy after exp transform: 0.6\n",
"Logistic Regression Accuracy after exp transform: 0.6\n",
"KNN Accuracy after exp transform: 0.56\n",
"********************************************************************************\n",
"Using log1p transform\n",
"SVC Accuracy after log transform: 0.37\n",
"Logistic Regression Accuracy after log transform: 0.6\n",
"KNN Accuracy after log transform: 0.54\n",
"********************************************************************************\n",
"Using logit transform\n",
"SVC Accuracy after logit transform: 0.4\n",
"Logistic Regression Accuracy after logit transform: 0.59\n",
"KNN Accuracy after logit transform: 0.6\n",
"********************************************************************************\n",
"Using square transform\n",
"SVC Accuracy after square transform: 0.63\n",
"Logistic Regression Accuracy after square transform: 0.61\n",
"KNN Accuracy after square transform: 0.53\n",
"********************************************************************************\n",
"Using sigmoid transform\n",
"SVC Accuracy after sigmoid transform: 0.56\n",
"Logistic Regression Accuracy after sigmoid transform: 0.6\n",
"KNN Accuracy after sigmoid transform: 0.53\n"
]
}
],
"source": [
"# instead of kernels, we will transform the distance matrix\n",
"print(\"*\"*80)\n",
"print(\"Using absolute value transform\")\n",
"X_train_abs = np.abs(X_train)\n",
"X_test_abs = np.abs(X_test)\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_abs, y_train)\n",
"y_pred = model.predict(X_test_abs)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'SVC Accuracy after absolute value transform: {accuracy}')\n",
"model = LogisticRegression()\n",
"model.fit(X_train_abs, y_train)\n",
"y_pred = model.predict(X_test_abs)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Logistic Regression Accuracy after absolute value transform: {accuracy}')\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_abs, y_train)\n",
"y_pred = model.predict(X_test_abs)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'KNN Accuracy after absolute value transform: {accuracy}')\n",
"\n",
"# e^-D\n",
"print(\"*\"*80)\n",
"print(\"Using exp transform\")\n",
"X_train_exp = np.exp(-X_train)\n",
"X_test_exp = np.exp(-X_test)\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_exp, y_train)\n",
"y_pred = model.predict(X_test_exp)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'SVC Accuracy after exp transform: {accuracy}')\n",
"model = LogisticRegression()\n",
"model.fit(X_train_exp, y_train)\n",
"y_pred = model.predict(X_test_exp)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Logistic Regression Accuracy after exp transform: {accuracy}')\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_exp, y_train)\n",
"y_pred = model.predict(X_test_exp)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'KNN Accuracy after exp transform: {accuracy}')\n",
"\n",
"# log(1+D)\n",
"print(\"*\"*80)\n",
"print(\"Using log1p transform\")\n",
"X_train_log = np.log1p(X_train)\n",
"X_test_log = np.log1p(X_test)\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_log, y_train)\n",
"y_pred = model.predict(X_test_log)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'SVC Accuracy after log transform: {accuracy}')\n",
"model = LogisticRegression() \n",
"model.fit(X_train_log, y_train)\n",
"y_pred = model.predict(X_test_log)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Logistic Regression Accuracy after log transform: {accuracy}')\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_log, y_train)\n",
"y_pred = model.predict(X_test_log)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'KNN Accuracy after log transform: {accuracy}')\n",
"\n",
"# logit(D)\n",
"print(\"*\"*80)\n",
"print(\"Using logit transform\")\n",
"X_train_logit = np.log(X_train / (1 - X_train))\n",
"X_test_logit = np.log(X_test / (1 - X_test))\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_logit, y_train)\n",
"y_pred = model.predict(X_test_logit)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'SVC Accuracy after logit transform: {accuracy}')\n",
"model = LogisticRegression()\n",
"model.fit(X_train_logit, y_train)\n",
"y_pred = model.predict(X_test_logit)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Logistic Regression Accuracy after logit transform: {accuracy}')\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_logit, y_train)\n",
"y_pred = model.predict(X_test_logit)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'KNN Accuracy after logit transform: {accuracy}')\n",
"\n",
"# D**2\n",
"print(\"*\"*80)\n",
"print(\"Using square transform\")\n",
"X_train_sq = X_train ** 2\n",
"X_test_sq = X_test ** 2\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_sq, y_train)\n",
"y_pred = model.predict(X_test_sq)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'SVC Accuracy after square transform: {accuracy}') \n",
"model = LogisticRegression()\n",
"model.fit(X_train_sq, y_train)\n",
"y_pred = model.predict(X_test_sq)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Logistic Regression Accuracy after square transform: {accuracy}')\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_sq, y_train)\n",
"y_pred = model.predict(X_test_sq)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'KNN Accuracy after square transform: {accuracy}')\n",
"\n",
"\n",
"# sigmoid(D)\n",
"print(\"*\"*80)\n",
"print(\"Using sigmoid transform\")\n",
"X_train_sigmoid = 1 / (1 + np.exp(-X_train))\n",
"X_test_sigmoid = 1 / (1 + np.exp(-X_test))\n",
"model = SVC(kernel='precomputed')\n",
"model.fit(X_train_sigmoid, y_train)\n",
"y_pred = model.predict(X_test_sigmoid)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'SVC Accuracy after sigmoid transform: {accuracy}')\n",
"model = LogisticRegression()\n",
"model.fit(X_train_sigmoid, y_train)\n",
"y_pred = model.predict(X_test_sigmoid)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'Logistic Regression Accuracy after sigmoid transform: {accuracy}')\n",
"model = KNeighborsClassifier(n_neighbors=5)\n",
"model.fit(X_train_sigmoid, y_train)\n",
"y_pred = model.predict(X_test_sigmoid)\n",
"accuracy = accuracy_score(y_test, y_pred)\n",
"print(f'KNN Accuracy after sigmoid transform: {accuracy}')\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}