diff --git a/CNN+TCN+LSTM+ATTENTION NEW.ipynb b/CNN+TCN+LSTM+ATTENTION NEW.ipynb
new file mode 100644
index 0000000..b49caf3
--- /dev/null
+++ b/CNN+TCN+LSTM+ATTENTION NEW.ipynb
@@ -0,0 +1,811 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "colab": {
+ "provenance": [],
+ "authorship_tag": "ABX9TyMBDiuw1IDIhMQ8URm6LC/0",
+ "include_colab_link": true
+ },
+ "kernelspec": {
+ "name": "python3",
+ "display_name": "Python 3"
+ },
+ "language_info": {
+ "name": "python"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "view-in-github",
+ "colab_type": "text"
+ },
+ "source": [
+ "
"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "FkAotuRzefyl"
+ },
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.preprocessing import StandardScaler, LabelEncoder\n",
+ "from keras.models import Model\n",
+ "from keras.layers import Input, Conv1D, MaxPooling1D, Flatten, Dense, Dropout, concatenate\n",
+ "from keras.layers import TimeDistributed, LSTM\n",
+ "from keras.optimizers import Adam"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import pandas as pd\n",
+ "# Replace 'PhiUSIIL_Phishing_URL_Dataset.csv' with the path to your dataset CSV file\n",
+ "data = pd.read_csv('PhiUSIIL_Phishing_URL_Dataset.csv')"
+ ],
+ "metadata": {
+ "id": "vo7b7h0kjTSO"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "X = data.drop(columns=['label'])\n",
+ "y = data['label']"
+ ],
+ "metadata": {
+ "id": "Q_c0I6iRjg8k"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [],
+ "metadata": {
+ "id": "8PQ0YCv0oxHm"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "label_encoders = {}\n",
+ "for column in X.select_dtypes(include='object'):\n",
+ " label_encoders[column] = LabelEncoder()\n",
+ " X[column] = label_encoders[column].fit_transform(X[column])\n",
+ "\n"
+ ],
+ "metadata": {
+ "id": "Qxfqg4UQjvDs"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Split the dataset into training and testing sets\n",
+ "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n"
+ ],
+ "metadata": {
+ "id": "tq5v4kLkjz5u"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Standardize numerical features\n",
+ "scaler = StandardScaler()\n",
+ "X_train_scaled = scaler.fit_transform(X_train.select_dtypes(include=['int64', 'float64']))\n",
+ "X_test_scaled = scaler.transform(X_test.select_dtypes(include=['int64', 'float64']))\n"
+ ],
+ "metadata": {
+ "id": "ownfWliMpGKi"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "\n",
+ "input_shape = (X_train_scaled.shape[1],) # Number of features"
+ ],
+ "metadata": {
+ "id": "9Qs7dx1Yerm-"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!pip install tensorflow"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "4NzfcllsiwXD",
+ "outputId": "7c84da09-5736-4a3c-8e71-775b1b8b10fa"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.15.0)\n",
+ "Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0)\n",
+ "Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3)\n",
+ "Requirement already satisfied: flatbuffers>=23.5.26 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25)\n",
+ "Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.5.4)\n",
+ "Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0)\n",
+ "Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.9.0)\n",
+ "Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1)\n",
+ "Requirement already satisfied: ml-dtypes~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0)\n",
+ "Requirement already satisfied: numpy<2.0.0,>=1.23.5 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.25.2)\n",
+ "Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0)\n",
+ "Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.0)\n",
+ "Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3)\n",
+ "Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (67.7.2)\n",
+ "Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0)\n",
+ "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0)\n",
+ "Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.10.0)\n",
+ "Requirement already satisfied: wrapt<1.15,>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.14.1)\n",
+ "Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.36.0)\n",
+ "Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.62.1)\n",
+ "Requirement already satisfied: tensorboard<2.16,>=2.15 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.2)\n",
+ "Requirement already satisfied: tensorflow-estimator<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0)\n",
+ "Requirement already satisfied: keras<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0)\n",
+ "Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0)\n",
+ "Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.27.0)\n",
+ "Requirement already satisfied: google-auth-oauthlib<2,>=0.5 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (1.2.0)\n",
+ "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.6)\n",
+ "Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.31.0)\n",
+ "Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (0.7.2)\n",
+ "Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.0.2)\n",
+ "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (5.3.3)\n",
+ "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.4.0)\n",
+ "Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (4.9)\n",
+ "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (1.3.1)\n",
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.3.2)\n",
+ "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.6)\n",
+ "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2.0.7)\n",
+ "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2024.2.2)\n",
+ "Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.16,>=2.15->tensorflow) (2.1.5)\n",
+ "Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.6.0)\n",
+ "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (3.2.2)\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from tensorflow.keras.layers import Input, Conv1D, MaxPooling1D"
+ ],
+ "metadata": {
+ "id": "1igPsnG4izcY"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!pip install tensorflow\n",
+ "import tensorflow as tf\n",
+ "from tensorflow import keras\n",
+ "from tensorflow.keras import layers"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "S3OmZL1HyvWb",
+ "outputId": "80951bb5-52b0-4ed5-d605-2c1f1f532927"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.15.0)\n",
+ "Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0)\n",
+ "Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3)\n",
+ "Requirement already satisfied: flatbuffers>=23.5.26 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25)\n",
+ "Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.5.4)\n",
+ "Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0)\n",
+ "Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.9.0)\n",
+ "Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1)\n",
+ "Requirement already satisfied: ml-dtypes~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0)\n",
+ "Requirement already satisfied: numpy<2.0.0,>=1.23.5 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.25.2)\n",
+ "Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0)\n",
+ "Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.0)\n",
+ "Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3)\n",
+ "Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (67.7.2)\n",
+ "Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0)\n",
+ "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0)\n",
+ "Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.10.0)\n",
+ "Requirement already satisfied: wrapt<1.15,>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.14.1)\n",
+ "Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.36.0)\n",
+ "Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.62.1)\n",
+ "Requirement already satisfied: tensorboard<2.16,>=2.15 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.2)\n",
+ "Requirement already satisfied: tensorflow-estimator<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0)\n",
+ "Requirement already satisfied: keras<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0)\n",
+ "Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0)\n",
+ "Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.27.0)\n",
+ "Requirement already satisfied: google-auth-oauthlib<2,>=0.5 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (1.2.0)\n",
+ "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.6)\n",
+ "Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.31.0)\n",
+ "Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (0.7.2)\n",
+ "Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.0.2)\n",
+ "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (5.3.3)\n",
+ "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.4.0)\n",
+ "Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (4.9)\n",
+ "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (1.3.1)\n",
+ "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.3.2)\n",
+ "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.6)\n",
+ "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2.0.7)\n",
+ "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2024.2.2)\n",
+ "Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.16,>=2.15->tensorflow) (2.1.5)\n",
+ "Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.6.0)\n",
+ "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (3.2.2)\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# CNN layer\n",
+ "cnn_input = Input(shape=(None, 10))\n",
+ "cnn_layer = Conv1D(filters=64, kernel_size=3, activation='relu')(cnn_input)\n",
+ "cnn_layer = MaxPooling1D(pool_size=2)(cnn_layer)"
+ ],
+ "metadata": {
+ "id": "z1G51_JLykx7"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "\n",
+ "from tensorflow.keras.layers import Input, TimeDistributed, Dense\n",
+ "# TCN layer\n",
+ "tcn_input = Input(shape=(1, 55))\n",
+ "tcn_layer = TimeDistributed(Dense(64, activation='relu'))(tcn_input)"
+ ],
+ "metadata": {
+ "id": "uo9K23NEy8Yw"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from tensorflow.keras.layers import Bidirectional, LSTM, Input\n",
+ "lstm_input = Input(shape=(1, 55))\n",
+ "lstm_layer = Bidirectional(LSTM(64, return_sequences=True))(lstm_input)"
+ ],
+ "metadata": {
+ "id": "ss2VAByzzC0s"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Import the Reshape layer\n",
+ "from keras.layers import Reshape\n",
+ "\n",
+ "# Reshape LSTM output to match dimensions\n",
+ "lstm_reshaped = Reshape((-1, 64))(lstm_layer)"
+ ],
+ "metadata": {
+ "id": "53YavPPGzEop"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "print(\"cnn_layer shape:\", cnn_layer.shape)\n",
+ "print(\"tcn_layer shape:\", tcn_layer.shape)\n",
+ "print(\"lstm_reshaped shape:\", lstm_reshaped.shape)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "GMY6gUwMzUSR",
+ "outputId": "21a13c09-8857-4415-f8b1-c50deb00c03f"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "cnn_layer shape: (None, None, 64)\n",
+ "tcn_layer shape: (None, 1, 64)\n",
+ "lstm_reshaped shape: (None, 2, 64)\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "!pip install -q tensorflow\n",
+ "import tensorflow as tf"
+ ],
+ "metadata": {
+ "id": "6Xcgu9Ci3nfC"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "cnn_layer = tf.keras.layers.Lambda(lambda x: tf.keras.backend.reshape(x, (-1, 1, x.shape[-1])))(cnn_layer)"
+ ],
+ "metadata": {
+ "id": "fP_1i1en39BW"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import tensorflow as tf\n",
+ "from tensorflow.keras.layers import Reshape\n",
+ "print(\"CNN Layer Shape:\", cnn_layer.shape)\n",
+ "print(\"TCN Layer Shape:\", tcn_layer.shape)\n",
+ "print(\"LSTM Reshaped Layer Shape:\", lstm_reshaped.shape)\n"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "0BMKz9PRzmVz",
+ "outputId": "bd617b27-4823-4530-df5c-2c852765ac53"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "CNN Layer Shape: (None, 1, 64)\n",
+ "TCN Layer Shape: (None, 1, 64)\n",
+ "LSTM Reshaped Layer Shape: (None, 2, 64)\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "cnn_layer = tf.keras.layers.Reshape((1, cnn_layer.shape[-1]))(cnn_layer)\n",
+ "tcn_layer = tf.keras.layers.Reshape((1, tcn_layer.shape[-1]))(tcn_layer)"
+ ],
+ "metadata": {
+ "id": "bVBgS-tNDnz2"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "import tensorflow as tf\n",
+ "\n",
+ "# Assuming lstm_reshaped has shape (None, 2, 64)\n",
+ "lstm_reshaped = tf.keras.layers.Reshape((1, -1))(lstm_reshaped)\n"
+ ],
+ "metadata": {
+ "id": "W1Y0qLQ453IC"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from tensorflow.keras.layers import Input, Dense, Concatenate, Conv1D, MaxPooling1D, Flatten, Dropout, LSTM, Bidirectional, Attention"
+ ],
+ "metadata": {
+ "id": "vjKnZxCzF7ou"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "cnn_layer_reshaped = Dense(128, activation=\"relu\")(cnn_layer)\n",
+ "attention = Attention()([cnn_layer_reshaped, tcn_layer, lstm_reshaped])"
+ ],
+ "metadata": {
+ "id": "gox1whorGHDb"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Merge layers\n",
+ "merged_layer = concatenate([cnn_layer, tcn_layer, lstm_reshaped, attention])\n",
+ "merged_layer = Flatten()(merged_layer)\n"
+ ],
+ "metadata": {
+ "id": "4xNtjw72GJ9n"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Fully connected layer\n",
+ "fc_layer = Dense(64, activation='relu')(merged_layer)"
+ ],
+ "metadata": {
+ "id": "b3fYJzTkGQoy"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Output layer\n",
+ "output_layer = Dense(1, activation='sigmoid')(fc_layer)"
+ ],
+ "metadata": {
+ "id": "RQru8f--GUJw"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Create model\n",
+ "model = Model(inputs=[cnn_input, tcn_input, lstm_input], outputs=output_layer)\n"
+ ],
+ "metadata": {
+ "id": "0z2fan52GYIK"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "\n",
+ "# Compile the model\n",
+ "model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])"
+ ],
+ "metadata": {
+ "id": "2akcY7QBGbnH"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "print(model.input_shape)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "chenQTMwGsbI",
+ "outputId": "56a85a66-56c8-4266-fea0-93948a8f2560"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "[(None, None, 10), (None, 1, 55), (None, 1, 55)]\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "print(X_train_scaled.shape)"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "eriUP9ViGxQR",
+ "outputId": "4b93a168-0bb9-4f9c-8d70-554eeabd44e6"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "(188636, 55)\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "X_train_scaled = X_train_scaled.reshape((-1, 1, 55))"
+ ],
+ "metadata": {
+ "id": "jxPxxmC0G1d1"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "model = keras.Sequential([\n",
+ " keras.layers.InputLayer(input_shape=(None, 55)),\n",
+ " # ...\n",
+ "])"
+ ],
+ "metadata": {
+ "id": "etjXDHUIHHvf"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "model.compile(\n",
+ " optimizer=\"adam\",\n",
+ " loss=\"binary_crossentropy\",metrics=['accuracy']\n",
+ ")"
+ ],
+ "metadata": {
+ "id": "EOo97jvHHc4D"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "model.fit(X_train_scaled, y_train, epochs=10, batch_size=64, validation_split=0.2)\n"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "QUaFmjXiG5iP",
+ "outputId": "2e762f13-93fb-462f-de52-fe3e071d66bf"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "Epoch 1/10\n",
+ "2358/2358 [==============================] - 15s 6ms/step - loss: 7.0652 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 2/10\n",
+ "2358/2358 [==============================] - 18s 7ms/step - loss: 7.0660 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 3/10\n",
+ "2358/2358 [==============================] - 12s 5ms/step - loss: 7.0656 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 4/10\n",
+ "2358/2358 [==============================] - 9s 4ms/step - loss: 7.0649 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 5/10\n",
+ "2358/2358 [==============================] - 10s 4ms/step - loss: 7.0645 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 6/10\n",
+ "2358/2358 [==============================] - 9s 4ms/step - loss: 7.0653 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 7/10\n",
+ "2358/2358 [==============================] - 10s 4ms/step - loss: 7.0653 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 8/10\n",
+ "2358/2358 [==============================] - 10s 4ms/step - loss: 7.0651 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 9/10\n",
+ "2358/2358 [==============================] - 9s 4ms/step - loss: 7.0659 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n",
+ "Epoch 10/10\n",
+ "2358/2358 [==============================] - 11s 5ms/step - loss: 7.0651 - accuracy: 0.0871 - val_loss: 7.0660 - val_accuracy: 0.0854\n"
+ ]
+ },
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "execution_count": 190
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "# Step 4: Evaluate the model\n",
+ "loss, accuracy = model.evaluate([X_test_scaled], y_test)\n",
+ "print(f'Test Accuracy: {accuracy}')"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "1nyxfj59M1pN",
+ "outputId": "1cf7a942-b1c1-495b-9eb0-a2033346ad27"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "1474/1474 [==============================] - 2s 1ms/step - loss: 6.0942 - accuracy: 0.0837\n",
+ "Test Accuracy: 0.08365317434072495\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "model.save('url_malicious_model.h5')"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "H_v7zsBd5lbg",
+ "outputId": "d4fa9812-7137-4a34-fe58-8f0225149839"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "name": "stderr",
+ "text": [
+ "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py:3103: UserWarning: You are saving your model as an HDF5 file via `model.save()`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')`.\n",
+ " saving_api.save_model(\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ " #Step 5: Make predictions\n",
+ "def predict_malicious(url_features):\n",
+ " scaled_features = scaler.transform(url_features)\n",
+ " prediction = model.predict([scaled_features, scaled_features, scaled_features])\n",
+ " if prediction >= 0.5:\n",
+ " return \"Malicious\"\n",
+ " else:\n",
+ " return \"Not Malicious\""
+ ],
+ "metadata": {
+ "id": "OcnhoY_ANIur"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "source": [],
+ "metadata": {
+ "id": "HnRRxWbdPujY"
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "from tensorflow.keras.preprocessing.text import Tokenizer\n",
+ "from tensorflow.keras.preprocessing.sequence import pad_sequences\n",
+ "tokenizer = Tokenizer()\n",
+ "max_len = 100\n",
+ "def preprocess_url(url):\n",
+ " # Tokenize and pad the URL\n",
+ " url_seq = tokenizer.texts_to_sequences([url])\n",
+ " url_padded = pad_sequences(url_seq, maxlen=max_len)\n",
+ " return url_padded\n",
+ "\n",
+ "def predict_url_maliciousness(url):\n",
+ " preprocessed_url = preprocess_url(url)\n",
+ " prediction = model.predict(preprocessed_url)[0][0]\n",
+ " if prediction >= 0.5:\n",
+ " return \"Malicious\"\n",
+ " else:\n",
+ " return \"Not Malicious\"\n",
+ "\n",
+ "# Example usage:\n",
+ "user_url = input(\"Enter a URL to check if it's malicious: \")\n",
+ "result = predict_url_maliciousness(user_url)\n",
+ "print(f\"The URL '{user_url}' is predicted to be: {result}\")\n"
+ ],
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "kjz1IjfXzEsm",
+ "outputId": "68753e04-48c6-4694-f82f-69dcf0a46b10"
+ },
+ "execution_count": null,
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Enter a URL to check if it's malicious: wwww.google.com\n"
+ ]
+ },
+ {
+ "output_type": "stream",
+ "name": "stderr",
+ "text": [
+ "WARNING:tensorflow:6 out of the last 11 calls to .predict_function at 0x7f6afa551fc0> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has reduce_retracing=True option that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.\n"
+ ]
+ },
+ {
+ "output_type": "stream",
+ "name": "stdout",
+ "text": [
+ "1/1 [==============================] - 0s 83ms/step\n",
+ "The URL 'wwww.google.com' is predicted to be: Not Malicious\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "source": [],
+ "metadata": {
+ "id": "B_9ZiYQ62kaf"
+ },
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "model.save('url_malicious_model.h5')\n"
+ ],
+ "metadata": {
+ "id": "P4b-zYdy1d9k"
+ },
+ "execution_count": null,
+ "outputs": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/Queue b/Queue
index 569c984..ce7de35 100644
--- a/Queue
+++ b/Queue
@@ -1,73 +1,118 @@
-public class QueuesArray {
- private int data[];
- private int front;
- private int rear;
- private int size;
-
- public QueuesArray(int n) {
- data = new int[n];
- front = 0;
- rear = 0;
- size = 0;
- }
-
- public int length() {
- return size;
- }
-
- public boolean isEmpty() {
- return size == 0;
- }
-
- public boolean isFull() {
- return size == data.length;
- }
-
- public void enqueue(int e) {
- if(isFull()) {
- System.out.println("Queue is Full");
- return;
- }
- else {
- data[rear] = e;
- rear = rear + 1;
- size = size + 1;
- }
- }
-
- public int dequeue() {
- if(isEmpty()) {
- System.out.println("Queue is Empty");
- return -1;
- }
- else {
- int e = data[front];
- front = front + 1;
- size = size - 1;
- return e;
- }
- }
-
- public void display() {
- for(int i=front;i
#include <stdio.h>
#include <stdbool.h>

#define MAX 6

/* Circular (ring-buffer) queue of ints with fixed capacity MAX. */
int intArray[MAX];
int front = 0;      /* index of the oldest element */
int rear = -1;      /* index of the most recently inserted element */
int itemCount = 0;  /* number of elements currently stored */

/* Return true when the queue holds no elements. */
bool isEmpty() {
   return itemCount == 0;
}

/* Return true when the queue is at capacity (MAX elements). */
bool isFull() {
   return itemCount == MAX;
}

/* Return the number of elements currently in the queue. */
int size() {
   return itemCount;
}

/* Return the front element without removing it.
 * Returns -1 when the queue is empty (previously read stale
 * array contents — undefined from the caller's perspective). */
int peek() {
   if(isEmpty()) {
      return -1;
   }
   return intArray[front];
}

/* Append data at the rear of the queue.
 * Silently ignored when the queue is full (callers rely on this). */
void insert(int data) {

   if(!isFull()) {

      /* Wrap rear back to the start of the array. */
      if(rear == MAX-1) {
         rear = -1;
      }

      intArray[++rear] = data;
      itemCount++;
   }
}

/* Remove and return the front element (FIFO order).
 * Returns -1 when the queue is empty, matching the behavior of the
 * Java dequeue() this code replaced. */
int removeData() {
   if(isEmpty()) {
      return -1;
   }

   int data = intArray[front++];

   /* Wrap front back to the start of the array. */
   if(front == MAX) {
      front = 0;
   }

   itemCount--;
   return data;
}
+
/* Demonstrates the circular queue: fill to capacity, remove one,
 * wrap the rear index, then drain in FIFO order. Output is printed
 * to stdout at each stage. */
int main() {
   /* Fill to capacity: front=0, rear=5, queue = 3 5 9 1 12 15. */
   int seed[] = {3, 5, 9, 1, 12, 15};
   for(int i = 0; i < 6; i++) {
      insert(seed[i]);
   }

   if(isFull()) {
      printf("Queue is full!\n");
   }

   /* Drop the oldest element (3); front advances to index 1. */
   int num = removeData();
   printf("Element removed: %d\n",num);

   /* rear wraps to index 0: queue = 16 5 9 1 12 15. */
   insert(16);

   /* Queue is full again, so these inserts are silently ignored. */
   insert(17);
   insert(18);

   printf("Element at front: %d\n",peek());

   printf("----------------------\n");
   printf("index : 5 4 3 2 1 0\n");
   printf("----------------------\n");
   printf("Queue: ");

   /* Drain in FIFO order: prints 5 9 1 12 15 16. */
   while(!isEmpty()) {
      printf("%d ",removeData());
   }

   return 0;
}