diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..62a150f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+.gradle/
+build/
+!gradle/wrapper/gradle-wrapper.jar
+.idea/
+
+docker/*.jar
+
+docker/output/*
+!docker/output/.keep
+
+docker/reports/*
+!docker/reports/.keep
+
+input-data/*
+!input-data/.keep
+
+*.log
+logs/
diff --git a/0.csv b/0.csv
deleted file mode 100644
index 2f7dfb7..0000000
--- a/0.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:eae61feac01cb36de9d6dd148a5b3fb76ad506fd8b283b20306df16f246e511e
-size 3406784
diff --git a/1.csv b/1.csv
deleted file mode 100644
index 3a258b0..0000000
--- a/1.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7b2681409003d07d3ca063eb34339a0cdea9b688d16723b0e284091afd6bf806
-size 7078520
diff --git a/2.csv b/2.csv
deleted file mode 100644
index 4cdd100..0000000
--- a/2.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ea9a50aa300dfe7e766eb419b2a963d31cfe68644acb23f654df7abe852d3a76
-size 10737171
diff --git a/3.csv b/3.csv
deleted file mode 100644
index afbe78d..0000000
--- a/3.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6268fae7b50879a151a6f6de4852e4f39d2e22a8315290e87dcda71f4e10b866
-size 14530705
diff --git a/4.csv b/4.csv
deleted file mode 100644
index 9ff08df..0000000
--- a/4.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b4adf0d42097f1b2c9e68731460c5a7c52cb7ac7238addab7a796817cee9d00b
-size 18299520
diff --git a/5.csv b/5.csv
deleted file mode 100644
index 3980291..0000000
--- a/5.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:212b6d6b07197921eaedaa271d781431b8bb034c5328622d63231a2967df1702
-size 22053240
diff --git a/6.csv b/6.csv
deleted file mode 100644
index 5906dc6..0000000
--- a/6.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:81f65085c6ce29a8f766244dc4b21d41d565ea3d6231b3b1c0b6739d67cd1d53
-size 25790880
diff --git a/7.csv b/7.csv
deleted file mode 100644
index df43af3..0000000
--- a/7.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4e73ff71a37438ec216fc522d77e3902c5670e24a917d0be047ed101dbeea914
-size 29524261
diff --git a/README.md b/README.md
index 948dc85..84b20f4 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,4 @@
+[![Review Assignment Due Date](https://classroom.github.com/assets/deadline-readme-button-22041afd0340ce965d47ae6ef1cefeee28c7c493a6346c4f15d667ab976d596c.svg)](https://classroom.github.com/a/uyodabcP)
 ## Lab: Implementing MapReduce for Sales Data Analysis with Hadoop
 # Objective
diff --git a/build.gradle.kts b/build.gradle.kts
new file mode 100644
index 0000000..c7e7e51
--- /dev/null
+++ b/build.gradle.kts
@@ -0,0 +1,42 @@
+plugins {
+    java
+}
+
+group = "com.vitaya.para"
+version = "1.0-SNAPSHOT"
+
+repositories {
+    mavenCentral()
+}
+
+java {
+    sourceCompatibility = JavaVersion.VERSION_1_8
+    targetCompatibility = JavaVersion.VERSION_1_8
+}
+
+dependencies {
+    compileOnly("org.apache.hadoop:hadoop-client:3.2.1")
+    compileOnly("org.apache.hadoop:hadoop-common:3.2.1")
+    compileOnly("org.apache.hadoop:hadoop-mapreduce-client-core:3.2.1")
+}
+
+tasks.jar {
+    manifest {
+        attributes(
+            "Main-Class" to "com.vitaya.para.sales.SalesDriver"
+        )
+    }
+    archiveBaseName.set("sales-analytics")
+
+    duplicatesStrategy = DuplicatesStrategy.EXCLUDE
+}
+
+tasks.register<Copy>("copyJarToDocker") {
+    dependsOn(tasks.jar)
+    from(tasks.jar.get().archiveFile)
+    into("docker")
+}
+
+tasks.build {
+    dependsOn("copyJarToDocker")
+}
diff --git a/docker/Dockerfile.build b/docker/Dockerfile.build
new file mode 100644
index 0000000..0abbf61
--- /dev/null
+++ b/docker/Dockerfile.build
@@ -0,0 +1,15 @@
+FROM gradle:7.6-jdk8
+
+WORKDIR /app
+
+# Copy gradle files first for caching
+COPY build.gradle.kts settings.gradle.kts ./
+
+# Copy source code
+COPY src ./src
+
+# Build the JAR with the Gradle bundled in the image
+# (the wrapper script is not copied into the build context)
+RUN gradle clean build --no-daemon
+
+# The JAR will be in /app/build/libs/
diff --git a/docker/build.sh b/docker/build.sh
new file mode 100644
index 0000000..65d3704
--- /dev/null
+++ b/docker/build.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# Build the sales-analytics JAR using Docker (Java 8 environment)
+# This script builds the project in a container and copies the JAR to the docker directory
+
+set -e
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
+
+echo "=== Building Sales Analytics JAR ==="
+echo "Project directory: $PROJECT_DIR"
+
+# Build in Docker container
+docker build -t sales-analytics-builder -f "$SCRIPT_DIR/Dockerfile.build" "$PROJECT_DIR"
+
+# Extract the JAR from the container
+CONTAINER_ID=$(docker create sales-analytics-builder)
+docker cp "$CONTAINER_ID:/app/build/libs/sales-analytics-1.0-SNAPSHOT.jar" "$SCRIPT_DIR/"
+docker rm "$CONTAINER_ID"
+
+echo ""
+echo "=== Build complete ==="
+echo "JAR file: $SCRIPT_DIR/sales-analytics-1.0-SNAPSHOT.jar"
diff --git a/docker/compose.yaml b/docker/compose.yaml
new file mode 100644
index 0000000..0528a8a
--- /dev/null
+++ b/docker/compose.yaml
@@ -0,0 +1,98 @@
+services:
+  namenode:
+    image: bde2020/hadoop-namenode:2.0.0-hadoop3.2.1-java8
+    container_name: namenode
+    hostname: namenode
+    ports:
+      - "9870:9870"
+      - "9000:9000"
+    environment:
+      - CLUSTER_NAME=sales-analytics
+    env_file:
+      - ./hadoop.env
+    volumes:
+      - namenode_data:/hadoop/dfs/name
+    networks:
+      - hadoop
+
+  datanode:
+    image: bde2020/hadoop-datanode:2.0.0-hadoop3.2.1-java8
+    container_name: datanode
+    hostname: datanode
+    environment:
+      - SERVICE_PRECONDITION=namenode:9870
+    env_file:
+      - ./hadoop.env
+    volumes:
+      - datanode_data:/hadoop/dfs/data
+    networks:
+      - hadoop
+    depends_on:
+      - namenode
+
+  resourcemanager:
+    image: bde2020/hadoop-resourcemanager:2.0.0-hadoop3.2.1-java8
+    container_name: resourcemanager
+    hostname: resourcemanager
+    ports:
+      - "8088:8088"
+    environment:
+      - SERVICE_PRECONDITION=namenode:9000 namenode:9870 datanode:9864
+    env_file:
+      - ./hadoop.env
+    volumes:
+      - ./sales-analytics-1.0-SNAPSHOT.jar:/opt/hadoop/jobs/sales-analytics-1.0-SNAPSHOT.jar:ro
+      - ../input-data:/opt/hadoop/input:ro
+      - ./reports:/opt/hadoop/reports
+      - ./output:/opt/hadoop/output
+      - ./run-job.sh:/opt/hadoop/run-job.sh:ro
+    networks:
+      - hadoop
+    depends_on:
+      - namenode
+      - datanode
+
+  nodemanager:
+    image: bde2020/hadoop-nodemanager:2.0.0-hadoop3.2.1-java8
+    container_name: nodemanager
+    hostname: nodemanager
+    ports:
+      - "8042:8042"
+    environment:
+      - SERVICE_PRECONDITION=namenode:9000 namenode:9870 datanode:9864 resourcemanager:8088
+    env_file:
+      - ./hadoop.env
+    networks:
+      - hadoop
+    depends_on:
+      - namenode
+      - datanode
+      - resourcemanager
+
+  historyserver:
+    image: bde2020/hadoop-historyserver:2.0.0-hadoop3.2.1-java8
+    container_name: historyserver
+    hostname: historyserver
+    ports:
+      - "8188:8188"
+    environment:
+      - SERVICE_PRECONDITION=namenode:9000 namenode:9870 datanode:9864 resourcemanager:8088
+    env_file:
+      - ./hadoop.env
+    volumes:
+      - historyserver_data:/hadoop/yarn/timeline
+    networks:
+      - hadoop
+    depends_on:
+      - namenode
+      - datanode
+      - resourcemanager
+
+volumes:
+  namenode_data:
+  datanode_data:
+  historyserver_data:
+
+networks:
+  hadoop:
+    driver: bridge
diff --git a/docker/hadoop.env b/docker/hadoop.env
new file mode 100644
index 0000000..253d34f
--- /dev/null
+++ b/docker/hadoop.env
@@ -0,0 +1,48 @@
+# Core configuration
+CORE_CONF_fs_defaultFS=hdfs://namenode:9000
+CORE_CONF_hadoop_http_staticuser_user=root
+CORE_CONF_hadoop_proxyuser_hue_hosts=*
+CORE_CONF_hadoop_proxyuser_hue_groups=*
+CORE_CONF_io_compression_codecs=org.apache.hadoop.io.compress.SnappyCodec
+
+# HDFS configuration
+HDFS_CONF_dfs_webhdfs_enabled=true
+HDFS_CONF_dfs_permissions_enabled=false
+HDFS_CONF_dfs_replication=1
+HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check=false
+
+# YARN configuration
+YARN_CONF_yarn_log___aggregation___enable=true
+YARN_CONF_yarn_log_server_url=http://historyserver:8188/applicationhistory/logs/
+YARN_CONF_yarn_resourcemanager_recovery_enabled=true
+YARN_CONF_yarn_resourcemanager_store_class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore
+YARN_CONF_yarn_resourcemanager_scheduler_class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler
+YARN_CONF_yarn_scheduler_capacity_root_default_maximum___allocation___mb=8192
+YARN_CONF_yarn_scheduler_capacity_root_default_maximum___allocation___vcores=4
+YARN_CONF_yarn_resourcemanager_fs_state___store_uri=/rmstate
+YARN_CONF_yarn_resourcemanager_system___metrics___publisher_enabled=true
+YARN_CONF_yarn_resourcemanager_hostname=resourcemanager
+YARN_CONF_yarn_resourcemanager_address=resourcemanager:8032
+YARN_CONF_yarn_resourcemanager_scheduler_address=resourcemanager:8030
+YARN_CONF_yarn_resourcemanager_resource__tracker_address=resourcemanager:8031
+YARN_CONF_yarn_timeline___service_enabled=true
+YARN_CONF_yarn_timeline___service_generic___application___history_enabled=true
+YARN_CONF_yarn_timeline___service_hostname=historyserver
+YARN_CONF_mapreduce_map_output_compress=true
+YARN_CONF_mapred_map_output_compress_codec=org.apache.hadoop.io.compress.SnappyCodec
+YARN_CONF_yarn_nodemanager_resource_memory___mb=8192
+YARN_CONF_yarn_nodemanager_resource_cpu___vcores=4
+YARN_CONF_yarn_nodemanager_disk___health___checker_max___disk___utilization___per___disk___percentage=98.5
+YARN_CONF_yarn_nodemanager_remote___app___log___dir=/app-logs
+YARN_CONF_yarn_nodemanager_aux___services=mapreduce_shuffle
+
+# MapReduce configuration
+MAPRED_CONF_mapreduce_framework_name=yarn
+MAPRED_CONF_mapred_child_java_opts=-Xmx1536m
+MAPRED_CONF_mapreduce_map_memory_mb=2048
+MAPRED_CONF_mapreduce_reduce_memory_mb=2048
+MAPRED_CONF_mapreduce_map_java_opts=-Xmx1536m
+MAPRED_CONF_mapreduce_reduce_java_opts=-Xmx1536m
+MAPRED_CONF_yarn_app_mapreduce_am_env=HADOOP_MAPRED_HOME=/opt/hadoop-3.2.1/
+MAPRED_CONF_mapreduce_map_env=HADOOP_MAPRED_HOME=/opt/hadoop-3.2.1/
+MAPRED_CONF_mapreduce_reduce_env=HADOOP_MAPRED_HOME=/opt/hadoop-3.2.1/
diff --git a/docker/output/.keep b/docker/output/.keep
new file mode 100644
index 0000000..e69de29
diff --git a/docker/reports/.keep b/docker/reports/.keep
new file mode 100644
index 0000000..e69de29
diff --git a/docker/run-job.sh b/docker/run-job.sh
new file mode 100644
index 0000000..a265a12
--- /dev/null
+++ b/docker/run-job.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+
+# Sales Analytics MapReduce Job Runner
+
+set -e
+
+NUM_REDUCERS=${NUM_REDUCERS:-2}
+
+JAR_PATH="/opt/hadoop/jobs/sales-analytics-1.0-SNAPSHOT.jar"
+INPUT_LOCAL="/opt/hadoop/input"
+OUTPUT_LOCAL="/opt/hadoop/output"
+HDFS_INPUT="/input"
+HDFS_OUTPUT="/output"
+
+echo "=== Sales Analytics MapReduce Job ==="
+echo "Number of reducers: $NUM_REDUCERS"
+echo ""
+
+# Wait for HDFS to be ready
+echo "Waiting for HDFS..."
+until hdfs dfsadmin -safemode wait 2>/dev/null; do
+    sleep 5
+done
+echo "HDFS is ready."
+
+# Setup HDFS
+hdfs dfs -mkdir -p $HDFS_INPUT
+hdfs dfs -rm -r -f $HDFS_INPUT/* 2>/dev/null || true
+hdfs dfs -rm -r -f $HDFS_OUTPUT 2>/dev/null || true
+
+# Copy input data
+echo "Copying input data to HDFS..."
+hdfs dfs -put $INPUT_LOCAL/*.csv $HDFS_INPUT/
+
+# Run MapReduce job
+echo ""
+hadoop jar $JAR_PATH $HDFS_INPUT $HDFS_OUTPUT $NUM_REDUCERS
+
+# Print results (already sorted by revenue, descending, by the sorting job)
+echo ""
+echo "=== Results ==="
+hdfs dfs -cat $HDFS_OUTPUT/part-* | tee $OUTPUT_LOCAL/results.txt
+
+echo ""
+echo "Results saved to: $OUTPUT_LOCAL/results.txt"
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..1b33c55
Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..ca025c8
--- /dev/null
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,7 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-bin.zip
+networkTimeout=10000
+validateDistributionUrl=true
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
new file mode 100644
index 0000000..23d15a9
--- /dev/null
+++ b/gradlew
@@ -0,0 +1,251 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+##############################################################################
+#
+#   Gradle start up script for POSIX generated by Gradle.
+#
+#   Important for running:
+#
+#   (1) You need a POSIX-compliant shell to run this script.
+#       If your /bin/sh is noncompliant, but you have some other compliant
+#       shell such as ksh or bash, then to run this script, type that shell
+#       name before the whole command line, like:
+#
+#           ksh Gradle
+#
+#       Busybox and similar reduced shells will NOT work, because this script
+#       requires all of these POSIX shell features:
+#         * functions;
+#         * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+#           «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+#         * compound commands having a testable exit status, especially «case»;
+#         * various built-in commands including «command», «set», and «ulimit».
+#
+#   Important for patching:
+#
+#   (2) This script targets any POSIX shell, so it avoids extensions provided
+#       by Bash, Ksh, etc; in particular arrays are avoided.
+#
+#       The "traditional" practice of packing multiple parameters into a
+#       space-separated string is a well documented source of bugs and security
+#       problems, so this is (mostly) avoided, by progressively accumulating
+#       options in "$@", and eventually passing that to Java.
+#
+#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+#       see the in-line comments for details.
+#
+#       There are tweaks for specific operating systems such as AIX, CygWin,
+#       Darwin, MinGW, and NonStop.
+#
+#   (3) This script is generated from the Groovy template
+#       https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       within the Gradle project.
+#
+#       You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+    APP_HOME=${app_path%"${app_path##*/}"}  # leaves a trailing /; empty if no leading path
+    [ -h "$app_path" ]
+do
+    ls=$( ls -ld "$app_path" )
+    link=${ls#*' -> '}
+    case $link in             #(
+      /*)   app_path=$link ;; #(
+      *)    app_path=$APP_HOME$link ;;
+    esac
+done
+
+# This is normally unused
+# shellcheck disable=SC2034
+APP_BASE_NAME=${0##*/}
+# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+    echo "$*"
+} >&2
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in                #(
+  CYGWIN* )         cygwin=true  ;; #(
+  Darwin* )         darwin=true  ;; #(
+  MSYS* | MINGW* )  msys=true    ;; #(
+  NONSTOP* )        nonstop=true ;;
+esac
+
+CLASSPATH="\\\"\\\""
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD=$JAVA_HOME/jre/sh/java
+    else
+        JAVACMD=$JAVA_HOME/bin/java
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD=java
+    if ! command -v java >/dev/null 2>&1
+    then
+        die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+    case $MAX_FD in #(
+      max*)
+        # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC2039,SC3045
+        MAX_FD=$( ulimit -H -n ) ||
+            warn "Could not query maximum file descriptor limit"
+    esac
+    case $MAX_FD in  #(
+      '' | soft) :;; #(
+      *)
+        # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC2039,SC3045
+        ulimit -n "$MAX_FD" ||
+            warn "Could not set maximum file descriptor limit to $MAX_FD"
+    esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+#   * args from the command line
+#   * the main class name
+#   * -classpath
+#   * -D...appname settings
+#   * --module-path (only if needed)
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+    APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+    CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+    JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    for arg do
+        if
+            case $arg in                                #(
+              -*)   false ;;                            # don't mess with options #(
+              /?*)  t=${arg#/} t=/${t%%/*}              # looks like a POSIX filepath
+                    [ -e "$t" ] ;;                      #(
+              *)    false ;;
+            esac
+        then
+            arg=$( cygpath --path --ignore --mixed "$arg" )
+        fi
+        # Roll the args list around exactly as many times as the number of
+        # args, so each arg winds up back in the position where it started, but
+        # possibly modified.
+        #
+        # NB: a `for` loop captures its iteration list before it begins, so
+        # changing the positional parameters here affects neither the number of
+        # iterations, nor the values presented in `arg`.
+        shift                   # remove old arg
+        set -- "$@" "$arg"      # push replacement arg
+    done
+fi
+
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Collect all arguments for the java command:
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
+#     and any embedded shellness will be escaped.
+#   * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
+#     treated as '${Hostname}' itself on the command line.
+
+set -- \
+        "-Dorg.gradle.appname=$APP_BASE_NAME" \
+        -classpath "$CLASSPATH" \
+        -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
+        "$@"
+
+# Stop when "xargs" is not available.
+if ! command -v xargs >/dev/null 2>&1
+then
+    die "xargs is not available"
+fi
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+#   readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+#   set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+        printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+        xargs -n1 |
+        sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+        tr '\n' ' '
+    )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/gradlew.bat b/gradlew.bat
new file mode 100644
index 0000000..db3a6ac
--- /dev/null
+++ b/gradlew.bat
@@ -0,0 +1,94 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem      https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+@rem SPDX-License-Identifier: Apache-2.0
+@rem
+
+@if "%DEBUG%"=="" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%"=="" set DIRNAME=.
+@rem This is normally unused
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if %ERRORLEVEL% equ 0 goto execute
+
+echo. 1>&2
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo. 1>&2
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
+echo. 1>&2
+echo Please set the JAVA_HOME variable in your environment to match the 1>&2
+echo location of your Java installation. 1>&2
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if %ERRORLEVEL% equ 0 goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+set EXIT_CODE=%ERRORLEVEL%
+if %EXIT_CODE% equ 0 set EXIT_CODE=1
+if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
+exit /b %EXIT_CODE%
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/input-data/.keep b/input-data/.keep
new file mode 100644
index 0000000..e69de29
diff --git a/run.txt b/run.txt
new file mode 100644
index 0000000..c320966
--- /dev/null
+++ b/run.txt
@@ -0,0 +1,9 @@
+cd docker
+./build.sh
+
+docker-compose up -d
+
+docker exec resourcemanager bash /opt/hadoop/run-job.sh
+
+cat docker/output/results.txt
+cat docker/reports/metrics.txt
\ No newline at end of file
diff --git a/settings.gradle.kts b/settings.gradle.kts
new file mode 100644
index 0000000..66e32db
--- /dev/null
+++ b/settings.gradle.kts
@@ -0,0 +1 @@
+rootProject.name = "lab3-vitaya-para"
diff --git a/src/main/java/com/vitaya/para/sales/DescendingDoubleComparator.java b/src/main/java/com/vitaya/para/sales/DescendingDoubleComparator.java
new file mode 100644
index 0000000..6209ab4
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/DescendingDoubleComparator.java
@@ -0,0 +1,21 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableComparator;
+
+public class DescendingDoubleComparator extends WritableComparator {
+
+    public DescendingDoubleComparator() {
+        super(DoubleWritable.class, true);
+    }
+
+    @Override
+    @SuppressWarnings("rawtypes")
+    public int compare(WritableComparable a, WritableComparable b) {
+        DoubleWritable d1 = (DoubleWritable) a;
+        DoubleWritable d2 = (DoubleWritable) b;
+
+        return d2.compareTo(d1);
+    }
+}
diff --git a/src/main/java/com/vitaya/para/sales/SalesDriver.java b/src/main/java/com/vitaya/para/sales/SalesDriver.java
new file mode 100644
index 0000000..5ede4e5
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/SalesDriver.java
@@ -0,0 +1,188 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.UUID;
+
+public class SalesDriver extends Configured implements Tool {
+
+    private static final String METRICS_FILE = "/opt/hadoop/reports/metrics.txt";
+    private static final int DEFAULT_NUM_REDUCERS = 2;
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new Configuration(), new SalesDriver(), args);
+        System.exit(exitCode);
+    }
+
+    @Override
+    public int run(String[] args) throws Exception {
+        if (args.length < 2) {
+            System.err.println("Usage: SalesDriver <input> <output> [numReducers]");
+            return 1;
+        }
+
+        Path inputPath = new Path(args[0]);
+        Path outputPath = new Path(args[1]);
+        int numReducers = args.length > 2 ?
+                Integer.parseInt(args[2]) : DEFAULT_NUM_REDUCERS;
+
+        Path tempPath = new Path("/tmp/sales_temp_" + UUID.randomUUID().toString());
+
+        Configuration conf = getConf();
+        FileSystem fs = FileSystem.get(conf);
+
+        // Delete output path if exists
+        if (fs.exists(outputPath)) {
+            fs.delete(outputPath, true);
+        }
+        if (fs.exists(tempPath)) {
+            fs.delete(tempPath, true);
+        }
+
+        System.out.println("=== Sales Analytics MapReduce ===");
+        System.out.println("Input: " + inputPath);
+        System.out.println("Output: " + outputPath);
+        System.out.println("Number of reducers: " + numReducers);
+        System.out.println();
+
+        long totalStartTime = System.currentTimeMillis();
+
+        System.out.println("Aggregation");
+        long job1StartTime = System.currentTimeMillis();
+
+        boolean job1Success = runAggregationJob(conf, inputPath, tempPath, numReducers);
+
+        long job1Time = System.currentTimeMillis() - job1StartTime;
+        System.out.println("Job 1 completed in " + job1Time + " ms");
+
+        if (!job1Success) {
+            System.err.println("Job 1 (Aggregation) failed!");
+            fs.delete(tempPath, true);
+            return 1;
+        }
+
+        System.out.println();
+        System.out.println("Sorting");
+        long job2StartTime = System.currentTimeMillis();
+
+        boolean job2Success = runSortingJob(conf, tempPath, outputPath);
+
+        long job2Time = System.currentTimeMillis() - job2StartTime;
+        System.out.println("Job 2 completed in " + job2Time + " ms");
+
+        fs.delete(tempPath, true);
+
+        if (!job2Success) {
+            System.err.println("Sorting failed!");
+            return 1;
+        }
+
+        long totalTime = System.currentTimeMillis() - totalStartTime;
+
+        System.out.println();
+        System.out.println("=== Performance Summary ===");
+        System.out.println("Aggregation: " + job1Time + " ms");
+        System.out.println("Sorting: " + job2Time + " ms");
+        System.out.println("Total time: " + totalTime + " ms");
+        System.out.println("Number of reducers: " + numReducers);
+
+        writeMetrics(numReducers, job1Time, job2Time, totalTime);
+
+        return 0;
+    }
+
+    private boolean runAggregationJob(Configuration conf, Path inputPath, Path outputPath, int numReducers)
+            throws IOException, ClassNotFoundException, InterruptedException {
+
+        Job job = Job.getInstance(conf, "Sales Aggregation");
+        job.setJarByClass(SalesDriver.class);
+
+        // Mapper
+        job.setMapperClass(SalesMapper.class);
+        job.setMapOutputKeyClass(Text.class);
+        job.setMapOutputValueClass(SalesWritable.class);
+
+        // Combiner (same as Reducer for local aggregation)
+        job.setCombinerClass(SalesReducer.class);
+
+        // Reducer
+        job.setReducerClass(SalesReducer.class);
+        job.setNumReduceTasks(numReducers);
+
+        // Output
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(SalesWritable.class);
+
+        FileInputFormat.addInputPath(job, inputPath);
+        FileOutputFormat.setOutputPath(job, outputPath);
+
+        return job.waitForCompletion(true);
+    }
+
+    private boolean runSortingJob(Configuration conf, Path inputPath, Path outputPath)
+            throws IOException, ClassNotFoundException, InterruptedException {
+
+        Job job = Job.getInstance(conf, "Sales Sorting");
+        job.setJarByClass(SalesDriver.class);
+
+        job.setMapperClass(SortMapper.class);
+        job.setMapOutputKeyClass(DoubleWritable.class);
+        job.setMapOutputValueClass(Text.class);
+
+        job.setSortComparatorClass(DescendingDoubleComparator.class);
+
+        job.setReducerClass(SortReducer.class);
+        job.setNumReduceTasks(1);
+
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(NullWritable.class);
+
+        FileInputFormat.addInputPath(job, inputPath);
+        FileOutputFormat.setOutputPath(job, outputPath);
+
+        return
+                job.waitForCompletion(true);
+    }
+
+    private void writeMetrics(int numReducers, long job1Time, long job2Time, long totalTime) {
+        try {
+            File metricsFile = new File(METRICS_FILE);
+            File parentDir = metricsFile.getParentFile();
+
+            if (parentDir != null && !parentDir.exists()) {
+                parentDir.mkdirs();
+            }
+
+            try (PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(metricsFile, true)))) {
+                String timestamp = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
+                writer.println("=== Run: " + timestamp + " ===");
+                writer.println("Number of reducers: " + numReducers);
+                writer.println("Aggregation: " + job1Time + " ms");
+                writer.println("Sorting: " + job2Time + " ms");
+                writer.println("Total time: " + totalTime + " ms");
+                writer.println();
+            }
+
+            System.out.println("Metrics appended to: " + METRICS_FILE);
+
+        } catch (IOException e) {
+            System.err.println("Warning: Failed to write metrics file: " + e.getMessage());
+        }
+    }
+}
diff --git a/src/main/java/com/vitaya/para/sales/SalesMapper.java b/src/main/java/com/vitaya/para/sales/SalesMapper.java
new file mode 100644
index 0000000..38234c8
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/SalesMapper.java
@@ -0,0 +1,56 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Mapper;
+
+import java.io.IOException;
+
+public class SalesMapper extends Mapper<LongWritable, Text, Text, SalesWritable> {
+
+    public enum SalesCounter {
+        MALFORMED_ROWS,
+        VALID_ROWS
+    }
+
+    private final Text categoryKey = new Text();
+    private final SalesWritable salesValue = new SalesWritable();
+
+    @Override
+    protected void map(LongWritable key, Text value, Context context)
+            throws IOException, InterruptedException {
+
+        String line = value.toString().trim();
+
+        // Skip empty lines and header
+        if (line.isEmpty() || line.startsWith("transaction_id") || line.startsWith("id")) {
+            return;
+        }
+
+        String[] fields = line.split(",");
+
+        // Expected format: transaction_id,product_id,category,price,quantity
+        // Rows with fewer than 5 fields are counted as malformed
+        if (fields.length < 5) {
+            context.getCounter(SalesCounter.MALFORMED_ROWS).increment(1);
+            return;
+        }
+
+        try {
+            String category = fields[2].trim();
+            double price = Double.parseDouble(fields[3].trim());
+            int quantity = Integer.parseInt(fields[4].trim());
+
+            double revenue = price * quantity;
+
+            categoryKey.set(category);
+            salesValue.set(revenue, quantity);
+
+            context.write(categoryKey, salesValue);
+            context.getCounter(SalesCounter.VALID_ROWS).increment(1);
+
+        } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) {
+            context.getCounter(SalesCounter.MALFORMED_ROWS).increment(1);
+        }
+    }
+}
diff --git a/src/main/java/com/vitaya/para/sales/SalesReducer.java b/src/main/java/com/vitaya/para/sales/SalesReducer.java
new file mode 100644
index 0000000..9e0a095
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/SalesReducer.java
@@ -0,0 +1,27 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Reducer;
+
+import java.io.IOException;
+
+public class SalesReducer extends Reducer<Text, SalesWritable, Text, SalesWritable> {
+
+    private final SalesWritable result = new SalesWritable();
+
+    @Override
+    protected void reduce(Text key, Iterable<SalesWritable> values, Context context)
+            throws IOException, InterruptedException {
+
+        double totalRevenue = 0.0;
+        int totalQuantity = 0;
+
+        for (SalesWritable value : values) {
+            totalRevenue += value.getRevenue();
+            totalQuantity += value.getQuantity();
+        }
+
+        result.set(totalRevenue, totalQuantity);
+        context.write(key, result);
+    }
+}
diff --git a/src/main/java/com/vitaya/para/sales/SalesWritable.java b/src/main/java/com/vitaya/para/sales/SalesWritable.java
new file mode 100644
index 0000000..9af6a99
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/SalesWritable.java
@@ -0,0 +1,61 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+public class SalesWritable implements Writable {
+
+    private double revenue;
+    private int quantity;
+
+    public SalesWritable() {
+        this.revenue = 0.0;
+        this.quantity = 0;
+    }
+
+    public SalesWritable(double revenue, int quantity) {
+        this.revenue = revenue;
+        this.quantity = quantity;
+    }
+
+    public double getRevenue() {
+        return revenue;
+    }
+
+    public void setRevenue(double revenue) {
+        this.revenue = revenue;
+    }
+
+    public int getQuantity() {
+        return quantity;
+    }
+
+    public void setQuantity(int quantity) {
+        this.quantity = quantity;
+    }
+
+    public void set(double revenue, int quantity) {
+        this.revenue = revenue;
+        this.quantity = quantity;
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        out.writeDouble(revenue);
+        out.writeInt(quantity);
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        revenue = in.readDouble();
+        quantity = in.readInt();
+    }
+
+    @Override
+    public String toString() {
+        return String.format("%.2f\t%d", revenue, quantity);
+    }
+}
diff --git a/src/main/java/com/vitaya/para/sales/SortMapper.java b/src/main/java/com/vitaya/para/sales/SortMapper.java
new file mode 100644
index 0000000..e7cbb59
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/SortMapper.java
@@ -0,0 +1,37 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Mapper;
+
+import java.io.IOException;
+
+public class SortMapper extends Mapper<LongWritable, Text, DoubleWritable, Text> {
+
+    private final DoubleWritable revenueKey = new DoubleWritable();
+
+    @Override
+    protected void map(LongWritable key, Text value, Context context)
+            throws IOException, InterruptedException {
+
+        String line = value.toString().trim();
+        if (line.isEmpty()) {
+            return;
+        }
+
+        String[] parts = line.split("\t");
+        if (parts.length < 2) {
+            return;
+        }
+
+        try {
+            double revenue = Double.parseDouble(parts[1].trim());
+            revenueKey.set(revenue);
+
+            context.write(revenueKey, value);
+
+        } catch (NumberFormatException e) { // skip rows with a non-numeric revenue field
+        }
+    }
+}
diff --git a/src/main/java/com/vitaya/para/sales/SortReducer.java b/src/main/java/com/vitaya/para/sales/SortReducer.java
new file mode 100644
index 0000000..5802bd3
--- /dev/null
+++ b/src/main/java/com/vitaya/para/sales/SortReducer.java
@@ -0,0 +1,20 @@
+package com.vitaya.para.sales;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Reducer;
+
+import java.io.IOException;
+
+public class SortReducer extends Reducer<DoubleWritable, Text, Text, NullWritable> {
+
+    @Override
+    protected void reduce(DoubleWritable key, Iterable<Text> values, Context context)
+            throws IOException, InterruptedException {
+
+        for (Text value : values) {
+            context.write(value, NullWritable.get());
+        }
+    }
+}