diff --git a/tools/kal-test/bin/graph/betweenness_run.sh b/tools/kal-test/bin/graph/betweenness_run.sh
index b5d1ad2e0bab74b83d02325c4053fba77a9f7e1c..f06e9329ddaa9e5a67d881089a811eeaf7b83e54 100644
--- a/tools/kal-test/bin/graph/betweenness_run.sh
+++ b/tools/kal-test/bin/graph/betweenness_run.sh
@@ -160,6 +160,7 @@ if [ ${is_raw} == "no" ]; then
   ./lib/kal-test_${scala_version_val}-0.1.jar ${dataset_name} ${is_raw} ${num_partitions_val} ${data_path_val} ${is_check} ${output_path} ${gt_path_val} | tee ./log/log
 else
   spark-submit \
+  --class org.opensource.betweenness_open.Program \
   --master yarn \
   --deploy-mode ${deploy_mode_val} \
   --name "Betweenness_${dataset_name}_opensource" \
@@ -176,20 +177,12 @@ else
   --conf spark.network.timeout=1000000s \
   --conf spark.executor.heartbeatInterval=100000s \
   --conf spark.rpc.message.maxSize=1000 \
-  --jars "./lib/scopt_2.11-3.2.0.jar" \
-  ./lib/hbse_2.11-0.1.jar \
-  -m yarn \
-  -s ${graph_split_val} \
-  -n ${num_partitions_val} \
-  -i ${data_path_val} \
-  -o ${output_path} \
-  -g ${gt_path_val} \
-  -p ${pivots_val} \
-  -b ${iteration_val} > betweenness_temp.log
-  CostTime=$(cat betweenness_temp.log |grep "CostTime of Top-K" | awk '{print $6}')
-  Accuracy=$(cat betweenness_temp.log |grep "Accuracy of Top-K" | awk '{print $6}')
+  --jars "./lib/scopt_2.12-3.5.0.jar" \
+  ./lib/betweenness_open-1.0-spark3.3.0.jar bnc yarn ${graph_split_val} ${data_path_val} ${output_path} ${gt_path_val} 10000 ${num_partitions_val} ${pivots_val} ${iteration_val} 10000 | tee ./log/betweenness_temp.log
+  CostTime=$(cat betweenness_temp.log |grep "finished. costTime =" | awk '{print $5}')
+  Accuracy=$(cat betweenness_temp.log |grep "finished. costTime =" | awk '{print $8}')
   currentTime=$(date "+%Y%m%d_H%M%S")
-  rm -rf betweenness_temp.log
+#  rm -rf ./log/betweenness_temp.log
   echo -e "algorithmName: Betweenness\ncostTime: $CostTime\ndatasetName: ${dataset_name}\nisRaw: 'yes'\nAccuracy: ${Accuracy}\ntestcaseType: Betweenness_opensource_${dataset_name}\n" > ./report/"Betweenness_${currentTime}.yml"
   echo "Exec Successful: end." > ./log/log
 fi
\ No newline at end of file
diff --git a/tools/kal-test/bin/graph/bfs_run.sh b/tools/kal-test/bin/graph/bfs_run.sh
index 4c43e983f876ffb16d9471cdeae828b2a997ca88..be15d5a2e21b96a25d3fd12feadd5db3b524a35d 100644
--- a/tools/kal-test/bin/graph/bfs_run.sh
+++ b/tools/kal-test/bin/graph/bfs_run.sh
@@ -140,10 +140,10 @@ do
   --conf "spark.executor.extraJavaOptions=${extra_java_options_val}" \
   --conf "spark.driver.extraJavaOptions=-Xms80g" \
   --conf spark.locality.wait.node=0 \
-  --jars "lib/scopt_2.10-3.5.0.jar" \
-  --driver-class-path "lib/scopt_2.10-3.5.0.jar" \
-  --conf "spark.executor.extraClassPath=scopt_2.10-3.5.0.jar" \
-  ./lib/bfs_2.10-0.1.2.jar \
+  --jars "lib/scopt_${scala_version_val}-3.5.0.jar" \
+  --driver-class-path "lib/scopt_${scala_version_val}-3.5.0.jar" \
+  --conf "spark.executor.extraClassPath=scopt_${scala_version_val}-3.5.0.jar" \
+  ./lib/xinjiang_Poc-0.1.0.jar \
   -g 'EdgeList' \
   -p 'EdgePartition2D' \
   -n ${num_partitions_val} \
diff --git a/tools/kal-test/bin/graph/louvain_run.sh b/tools/kal-test/bin/graph/louvain_run.sh
index 2e3421e9b314698a2f10c056278a5d3653bacf2d..31378c49ab27a141d7580e845bbbd5f0b265fee8 100644
--- a/tools/kal-test/bin/graph/louvain_run.sh
+++ b/tools/kal-test/bin/graph/louvain_run.sh
@@ -140,7 +140,7 @@ else
   --conf spark.shuffle.manager=SORT \
   --conf spark.shuffle.blockTransferService=nio \
   --conf spark.locality.wait.node=0 \
-  ./lib/louvain_2.11-0.1.0_open_sourced.jar yarn ${data_path_val} ${community_output} ${modularity_output} " " ${num_partitions_val} 2000 > louvain_temp.log
+  ./lib/louvain_2.12-0.1.0_open_sourced.jar yarn ${data_path_val} ${community_output} ${modularity_output} " " ${num_partitions_val} > louvain_temp.log
   costTime=$(cat louvain_temp.log |grep "cost_time:" | awk '{print $2}')
   modularity=$(cat louvain_temp.log |grep "modularity:" | awk '{print $2}')
   currentTime=$(date "+%Y%m%d_H%M%S")
diff --git a/tools/kal-test/bin/ml/dbscan_run.sh b/tools/kal-test/bin/ml/dbscan_run.sh
index 650e092685dc08ceeefa8a582246e11c7e792aa5..2ace9a8a5f3c2881213a1764390c18d43de8326a 100644
--- a/tools/kal-test/bin/ml/dbscan_run.sh
+++ b/tools/kal-test/bin/ml/dbscan_run.sh
@@ -142,12 +142,12 @@ else
   hdfs dfs -mkdir -p ${hdfsJarPath}
   hdfs dfs -ls ${hdfsJarPath}
   if [ $? -eq 0 ];then
-    hdfs dfs -rm -r -f ${hdfsJarPath}/alitouka_dbscan_2.11-0.1.jar
-    hdfs dfs -put ./lib/alitouka_dbscan_2.11-0.1.jar ${hdfsJarPath}
+    hdfs dfs -rm -r -f ${hdfsJarPath}/alitouka_dbscan_${scala_version_val}-0.1.jar
+    hdfs dfs -put ./lib/alitouka_dbscan_${scala_version_val}-0.1.jar ${hdfsJarPath}
   fi

   spark-submit \
-  --jars "lib/scopt_2.11-3.5.0.jar" \
+  --jars "lib/scopt_${scala_version_val}-3.5.0.jar" \
   --class org.alitouka.spark.dbscan.DbscanDriver \
   --deploy-mode ${deploy_mode_val} \
   --name "alitouka_DBSCAN_${model_conf}" \
@@ -159,7 +159,7 @@ else
   --master ${master_val} \
   --conf "spark.executor.extraJavaOptions=${extra_java_options_val}" \
   --conf "spark.driver.maxResultSize=${driver_max_result_size_val}" \
-  ${hdfsJarPath}/alitouka_dbscan_2.11-0.1.jar --ds-master ${master_val} --ds-jar ${hdfsJarPath}/alitouka_dbscan_${scala_version_val}-0.1.jar --ds-input ${data_path_val} --ds-output ${outputPath} --eps ${epsilon_val} --numPts ${min_points_val} >dbscan_tmp.log
+  ${hdfsJarPath}/alitouka_dbscan_${scala_version_val}-0.1.jar --ds-master ${master_val} --ds-jar ${hdfsJarPath}/alitouka_dbscan_${scala_version_val}-0.1.jar --ds-input ${data_path_val} --ds-output ${outputPath} --eps ${epsilon_val} --numPts ${min_points_val} >dbscan_tmp.log
   CostTime=$(cat dbscan_tmp.log | grep "train total" | awk '{print $3}')
   currentTime=$(date "+%Y%m%d_%H%M%S")
   rm -rf dbscan_tmp.log
diff --git a/tools/kal-test/src/main/scala/com/bigdata/graph/KcoreMain.scala b/tools/kal-test/src/main/scala/com/bigdata/graph/KcoreMain.scala
index ae2c8124d7fc5beb00aea40766d2d1fc3414da26..81ad18fc80012ecc9a7715f8b83ee1123d7f5148 100644
--- a/tools/kal-test/src/main/scala/com/bigdata/graph/KcoreMain.scala
+++ b/tools/kal-test/src/main/scala/com/bigdata/graph/KcoreMain.scala
@@ -38,7 +38,7 @@ class KCoreRawParams extends Serializable {
   @BeanProperty var testcaseType: String = _
 }

-object KCore {
+object KCore extends Serializable {
   val initialMsg = "-10"

   def mergeMsg(msg1: String, msg2: String): String = msg1 + ":" + msg2