From 99f6e716c605cffee15fd77d8dad89f2798d5719 Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Tue, 9 Jul 2024 14:23:07 +0000 Subject: [PATCH 1/9] [ALGOS-263] feat(algos): add folder algorithms_test which contains the scripts to test the algorithms; --- algorithms_test/1_dataset.sh | 72 ++ algorithms_test/2_setup.sh | 172 ++++ algorithms_test/3_run.sh | 189 +++++ algorithms_test/4_summary.sh | 118 +++ algorithms_test/ReadMe.md | 733 ++++++++++++++++++ algorithms_test/config/1_dataset.json | 16 + algorithms_test/config/2_setup.json | 92 +++ algorithms_test/config/3_run.json | 621 +++++++++++++++ algorithms_test/config/3_run_one.json | 34 + .../gsql/MyGraph/1_create_schema.gsql | 17 + .../gsql/MyGraph/2_create_loading_job.gsql | 13 + .../gsql/MyGraph2/1_create_schema.gsql | 17 + .../gsql/MyGraph2/2_create_loading_job.gsql | 13 + algorithms_test/mem/1_start.sh | 10 + algorithms_test/mem/2_peak.sh | 8 + algorithms_test/mem/3_reset.sh | 44 ++ algorithms_test/mem/4_stop.sh | 3 + algorithms_test/mem/peak.awk | 25 + algorithms_test/mem/run_free.sh | 8 + .../tools/search_for_gsql_files.sh | 16 + 20 files changed, 2221 insertions(+) create mode 100755 algorithms_test/1_dataset.sh create mode 100755 algorithms_test/2_setup.sh create mode 100755 algorithms_test/3_run.sh create mode 100755 algorithms_test/4_summary.sh create mode 100644 algorithms_test/ReadMe.md create mode 100644 algorithms_test/config/1_dataset.json create mode 100644 algorithms_test/config/2_setup.json create mode 100644 algorithms_test/config/3_run.json create mode 100644 algorithms_test/config/3_run_one.json create mode 100644 algorithms_test/gsql/MyGraph/1_create_schema.gsql create mode 100644 algorithms_test/gsql/MyGraph/2_create_loading_job.gsql create mode 100644 algorithms_test/gsql/MyGraph2/1_create_schema.gsql create mode 100644 algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql create mode 100755 algorithms_test/mem/1_start.sh create mode 100755 algorithms_test/mem/2_peak.sh create mode 
100755 algorithms_test/mem/3_reset.sh create mode 100755 algorithms_test/mem/4_stop.sh create mode 100644 algorithms_test/mem/peak.awk create mode 100755 algorithms_test/mem/run_free.sh create mode 100755 algorithms_test/tools/search_for_gsql_files.sh diff --git a/algorithms_test/1_dataset.sh b/algorithms_test/1_dataset.sh new file mode 100755 index 00000000..7a965977 --- /dev/null +++ b/algorithms_test/1_dataset.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +# Main function +main() { + # Check if required commands are available + if ! command -v jq &> /dev/null; then + echo "Error: jq is not installed." + exit 1 + fi + + # Read the JSON configuration file + dir=$(cd "$(dirname "$0")"; pwd) + config_file="${dir}/config/1_dataset.json" + + # Check if the configuration file exists + if [ ! -f "$config_file" ]; then + echo "Configuration file not found: $config_file" + exit 1 + fi + + # Extract general settings + default_directory=$(jq -r '.general_settings.default_directory' "$config_file") + default_directory=${default_directory/#\~/$HOME} + + # Iterate over each dataset + datasets=$(jq -r '.datasets | to_entries[] | @base64' "$config_file") + + # Decode each dataset entry and process it + echo "$datasets" | while IFS= read -r dataset_b64; do + dataset=$(echo "${dataset_b64}" | base64 --decode) + dataset_name=$(echo "$dataset" | jq -r '.key') + download_link=$(echo "$dataset" | jq -r '.value.download_link') + directory=$(echo "$dataset" | jq -r '.value.directory // empty') + directory=${directory/#\~/$HOME} + directory=${directory:-$default_directory} + top_level_dir=$(echo "$dataset" | jq -r '.value.top_level_dir') + + echo "======================================== ${dataset_name} ========================================" + + # Create the directory if it doesn't exist + mkdir -p "$directory" + + # Extract the file name from the download link + file_name=$(basename "$download_link") + + # Check if the folder exists before downloading the dataset + 
dataset_folder="$directory/$top_level_dir" + if [ ! -d "$dataset_folder" ]; then + # Download the dataset if it doesn't exist + if [ ! -f "$directory/$file_name" ]; then + echo "Downloading $file_name..." + if ! wget -O "$directory/$file_name" "$download_link"; then + echo "Failed to download $file_name" + continue + fi + fi + + # Unzip the dataset + echo "Unzipping $file_name into $directory..." + if tar -xvjf "$directory/$file_name" -C "$directory" --strip-components=1 --one-top-level="$top_level_dir"; then + echo "Finished unzipping $file_name." + else + echo "Failed to unzip $file_name" + fi + else + echo "Directory $dataset_folder already exists, skipping unzipping." + fi + done +} + +# Run the main function +main diff --git a/algorithms_test/2_setup.sh b/algorithms_test/2_setup.sh new file mode 100755 index 00000000..a3694b12 --- /dev/null +++ b/algorithms_test/2_setup.sh @@ -0,0 +1,172 @@ +#!/bin/bash + +# Function to run gsql commands +run_gsql_file() { + local file_path=$1 + local command=$2 + local graph_name=$3 + + if [ -e "$file_path" ]; then + echo "Running: $command $file_path" + if [ -n "$graph_name" ]; then + if ! gsql -u "$user_name" -p "$password" -g "$graph_name" "$file_path"; then + echo "Error: Failed to run $command $file_path for graph $graph_name" + return 1 + fi + else + if ! gsql -u "$user_name" -p "$password" "$file_path"; then + echo "Error: Failed to run $command $file_path" + return 1 + fi + fi + return 0 + else + echo "Error: File path $file_path does not exist." 
+ return 1 + fi +} + +# Function to create and install queries +install_queries_for_graph() { + local graph_name=$1 + local queries_to_install=$2 + local repo_dir=$3 + + # Check if there are queries to install + if [ "$queries_to_install" != "null" ] && [ -n "$queries_to_install" ]; then + # Drop all queries for the graph + gsql -u "$user_name" -p "$password" -g "$graph_name" "drop query *" + + # Create the queries + echo "$queries_to_install" | jq -r '.[]' | while IFS= read -r query_path; do + gsql_query_path="${repo_dir}/${query_path}" + # Create the query + if ! run_gsql_file "$gsql_query_path" "Creating query" "$graph_name"; then + echo "Error: Failed to create query from $gsql_query_path for graph $graph_name" + return 1 + fi + done + + # Install the queries + echo "Installing queries for graph: $graph_name" + if ! gsql -u "$user_name" -p "$password" -g "$graph_name" "INSTALL QUERY *"; then + echo "Error: Failed to install the queries for graph: $graph_name" + return 1 + fi + else + echo "No queries to install for graph: $graph_name" + fi + return 0 +} + +# Main function +main() { + # Check if required commands are available + if ! command -v jq &> /dev/null; then + echo "Error: jq is not installed." + exit 1 + fi + + if ! command -v gsql &> /dev/null; then + echo "Error: gsql is not installed." + exit 1 + fi + + # Read the JSON configuration file + dir=$(cd "$(dirname "$0")"; pwd) + config_file="${dir}/config/2_setup.json" + gsql_dir="${dir}/gsql" + repo_dir="${dir}/.." + + # Check if the configuration file exists + if [ ! -f "$config_file" ]; then + echo "Error: Configuration file not found: $config_file" + exit 1 + fi + + # Extract user_name and password + user_name=$(jq -r '.tigergraph.user_name // empty' "$config_file") + password=$(jq -r '.tigergraph.password // empty' "$config_file") + + # Check if user_name and password are empty + if [ -z "$user_name" ]; then + echo "Error: user_name is not set in the configuration file." 
+ exit 1 + fi + + if [ -z "$password" ]; then + echo "Error: password is not set in the configuration file." + exit 1 + fi + + # Extract execution steps and set default values if not provided + to_drop_graph=$(jq -r '.execution_steps.drop_graph // empty' "$config_file") + to_drop_graph=${to_drop_graph:-false} + to_create_schema=$(jq -r '.execution_steps.create_schema // empty' "$config_file") + to_create_schema=${to_create_schema:-false} + to_create_loading_job=$(jq -r '.execution_steps.create_loading_job // empty' "$config_file") + to_create_loading_job=${to_create_loading_job:-false} + to_run_loading_job=$(jq -r '.execution_steps.run_loading_job // empty' "$config_file") + to_run_loading_job=${to_run_loading_job:-false} + to_install_queries=$(jq -r '.execution_steps.install_queries // empty' "$config_file") + to_install_queries=${to_install_queries:-false} + + # Iterate over each graph and get its file_path + graphs=$(jq -r '.graphs | to_entries[] | @base64' "$config_file") + + echo "$graphs" | while IFS= read -r graph_b64; do + graph=$(echo "${graph_b64}" | base64 --decode) + graph_name=$(echo "$graph" | jq -r '.key') + file_path=$(echo "$graph" | jq -r '.value.file_path') + file_path=${file_path/#\~/$HOME} + queries_to_install=$(echo "$graph" | jq -r '.value.queries_to_install // empty') + + echo "======================================== ${graph_name} ========================================" + + # Drop the graph + if [ "$to_drop_graph" = "true" ]; then + echo "Dropping the graph ${graph_name}..." + gsql -u "$user_name" -p "$password" -g "$graph_name" "DROP JOB *" + gsql -u "$user_name" -p "$password" -g "$graph_name" "DROP QUERY *" + gsql -u "$user_name" -p "$password" "DROP GRAPH ${graph_name}" + echo "Finished dropping graph ${graph_name}." 
+ echo "--------------------------------------------------------------------------------" + fi + + # Create the schema + if [ "$to_create_schema" = "true" ]; then + gsql_schema_path="$gsql_dir/$graph_name/1_create_schema.gsql" + run_gsql_file "$gsql_schema_path" "Creating schema" + echo "--------------------------------------------------------------------------------" + fi + + # Create the loading job + if [ "$to_create_loading_job" = "true" ]; then + gsql_loading_job_path="$gsql_dir/$graph_name/2_create_loading_job.gsql" + run_gsql_file "$gsql_loading_job_path" "Creating loading job" + echo "--------------------------------------------------------------------------------" + fi + + # Run the loading job + if [ "$to_run_loading_job" = "true" ]; then + if [ -e "$file_path" ]; then + echo "Running loading job for $file_path..." + if ! gsql -u "$user_name" -p "$password" -g "$graph_name" "RUN LOADING JOB loading_job USING f1=\"$file_path\""; then + echo "Error: Failed to run loading job for $file_path" + fi + echo "Finished running loading job for $file_path." + else + echo "Error: File path $file_path does not exist." 
+ fi + echo "--------------------------------------------------------------------------------" + fi + + # Create and install the queries + if [ "$to_install_queries" = "true" ]; then + install_queries_for_graph "$graph_name" "$queries_to_install" "$repo_dir" + fi + done +} + +# Run the main function +main diff --git a/algorithms_test/3_run.sh b/algorithms_test/3_run.sh new file mode 100755 index 00000000..024dc26c --- /dev/null +++ b/algorithms_test/3_run.sh @@ -0,0 +1,189 @@ +#!/bin/bash + +# Function to run the curl command +run_curl_command() { + local graph_name=$1 + local query_name=$2 + local para_str=$3 + local timeout_ms=$4 + local result_file_path=$5 + local duration_file_path=$6 + + echo "Starting curl command for query: $query_name on graph: $graph_name" + + start_time=$(date +%s%N) + curl_result=$(curl -s -H "GSQL-TIMEOUT: $timeout_ms" "http://127.0.0.1:14240/restpp/query/${graph_name}/${query_name}?${para_str}") + end_time=$(date +%s%N) + + # Calculate the duration + duration=$((end_time - start_time)) + duration_in_milliseconds=$(echo "scale=3; $duration / 1000000" | bc) + + # Create query result directory if it doesn't exist + mkdir -p "$(dirname "$result_file_path")" + + # Check for errors in the curl result + error=$(echo "$curl_result" | jq -r '.error') + message=$(echo "$curl_result" | jq -r '.message') + + if [ "$error" = "true" ]; then + echo "Error: $message" + fi + + # Write the result of the curl command to file + echo "$curl_result" > "$result_file_path" + echo "Result has been written to $result_file_path" + + # Write the duration to file + echo "$duration_in_milliseconds" > "$duration_file_path" + echo "Duration has been written to $duration_file_path" + + echo "Finished curl command for query: $query_name on graph: $graph_name" +} + +# Main function +main() { + # Default config file path + dir=$(cd "$(dirname "$0")"; pwd) + config_file="${dir}/config/3_run.json" + + # Parse command-line arguments + while getopts "c:f:" opt; do + case 
${opt} in + c) + config_file=$OPTARG + ;; + f) + filter=$OPTARG + ;; + \?) + echo "Usage: $0 [-c config_file] [-f filter]" + exit 1 + ;; + esac + done + + # Check if required commands are available + if ! command -v jq &> /dev/null; then + echo "Error: jq is not installed." + exit 1 + fi + + # Check if the configuration file exists + if [ ! -f "$config_file" ]; then + echo "Error: Configuration file not found: $config_file" + exit 1 + fi + + # Read general settings from the configuration file + default_graph_name=$(jq -r '.general_settings.default_graph_name' "$config_file") + default_timeout_in_minutes=$(jq -r '.general_settings.default_timeout_in_minutes' "$config_file") + default_output_directory=$(jq -r '.general_settings.default_output_directory' "$config_file") + default_output_directory=${default_output_directory/#\~/$HOME} + summary_file_path=$(jq -r '.general_settings.summary_file_path' "$config_file") + summary_file_path=${summary_file_path/#\~/$HOME} + + # Write header to the CSV file + echo "algorithm,run_number,query_run_time_sec,query_peak_memory_gb" > "$summary_file_path" + + # Run the memory monitor + source "${dir}/mem/1_start.sh" + + # Iterate over each algorithm + algorithms=$(jq -r '.algorithms | to_entries[] | .key' "$config_file") + for algorithm in $algorithms; do + # Apply filter if specified + if [ -n "$filter" ]; then + if [[ ! "$algorithm" == *$filter ]]; then + continue + fi + fi + + runs=$(jq -r ".algorithms[\"${algorithm}\"][]" "$config_file" | jq -c .) 
+ + # Calculate total run number for an algorithm + mapfile -t runs_array <<< "$runs" + total_runs=${#runs_array[@]} + + # Iterate over each run for the algorithm + run_index=0 + for run in "${runs_array[@]}"; do + + echo "==================== ${algorithm} run ${run_index} ====================" + + # Reset the memory monitor + source "${dir}/mem/3_reset.sh" + + # Extract values from the JSON + graph_name=$(echo "$run" | jq -r '.graph_name // empty') + query_name=$(echo "$run" | jq -r '.query_name // empty') + timeout_in_minutes=$(echo "$run" | jq -r '.timeout_in_minutes // empty') + output_directory=$(echo "$run" | jq -r '.output_directory // empty') + parameters=$(echo "$run" | jq -r '.parameters') + + # Check if query_name exists + if [ -z "$query_name" ] || [ "$query_name" == "null" ]; then + echo "Error: query_name is a must-have key in the algorithm configuration for algorithm: $algorithm; index: $run_index" + exit 1 + fi + + # Set default values if not provided + graph_name=${graph_name:-$default_graph_name} + timeout_in_minutes=${timeout_in_minutes:-$default_timeout_in_minutes} + if [ "$total_runs" -eq 1 ]; then + output_directory=${output_directory:-"$default_output_directory/${algorithm}"} + else + output_directory=${output_directory:-"$default_output_directory/${algorithm}/${run_index}"} + fi + output_directory=${output_directory/#\~/$HOME} + result_file_path="$output_directory/result.json" + duration_file_path="$output_directory/duration.txt" + memory_file_path="$output_directory/memory.txt" + + # Create parameter list string + para_str=$(echo "$parameters" | jq -r 'to_entries | map("\(.key)=\(.value|tostring)") | join("&")') + + # Calculate timeout in milliseconds + timeout_ms=$((timeout_in_minutes * 60000)) + + # Run the curl command + run_curl_command "$graph_name" "$query_name" "$para_str" "$timeout_ms" "$result_file_path" "$duration_file_path" + + # Increment run index + run_index=$((run_index + 1)) + + # Save the peak memory to file + source 
"${dir}/mem/2_peak.sh" > "$memory_file_path" + echo "Peak memory has been written to $memory_file_path" + + # Check if duration.txt exists + if [ -f "$duration_file_path" ]; then + # Read query run time in ms and convert to seconds + query_run_time_ms=$(cat "$duration_file_path") + query_run_time_sec=$(echo "scale=3; $query_run_time_ms / 1000" | bc) + else + echo "Warning: $duration_file_path not found." + continue + fi + + # Check if memory.txt exists + if [ -f "$memory_file_path" ]; then + # Extract query peak memory from memory.txt + query_peak_memory=$(grep 'max mem - min mem' "$memory_file_path" | awk -F' ' '{print $(NF-1)}') + else + echo "Warning: $memory_file_path not found." + continue + fi + + # Write the data to the CSV file + echo "$algorithm,${run_index},$query_run_time_sec,$query_peak_memory" >> "$summary_file_path" + done + done + + # Stop the memory monitor + source "${dir}/mem/4_stop.sh" +} + +# Run the main function +main "$@" + diff --git a/algorithms_test/4_summary.sh b/algorithms_test/4_summary.sh new file mode 100755 index 00000000..23584dae --- /dev/null +++ b/algorithms_test/4_summary.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +# Main function +main() { + # Default config file path + dir=$(cd "$(dirname "$0")"; pwd) + config_file="${dir}/config/3_run.json" + + # Parse command-line arguments + while getopts "c:f:" opt; do + case ${opt} in + c) + config_file=$OPTARG + ;; + f) + filter=$OPTARG + ;; + \?) + echo "Usage: $0 [-c config_file] [-f filter]" + exit 1 + ;; + esac + done + + # Check if required commands are available + if ! command -v jq &> /dev/null; then + echo "Error: jq is not installed." + exit 1 + fi + + # Check if the configuration file exists + if [ ! 
-f "$config_file" ]; then + echo "Error: Configuration file not found: $config_file" + exit 1 + fi + + # Read general settings from the configuration file + default_graph_name=$(jq -r '.general_settings.default_graph_name' "$config_file") + default_output_directory=$(jq -r '.general_settings.default_output_directory' "$config_file") + default_output_directory=${default_output_directory/#\~/$HOME} + summary_file_path=$(jq -r '.general_settings.summary_file_path' "$config_file") + summary_file_path=${summary_file_path/#\~/$HOME} + + # Write header to the CSV file + echo "algorithm,run_number,query_run_time_sec,query_peak_memory_gb" > "$summary_file_path" + + # Iterate over each algorithm + algorithms=$(jq -r '.algorithms | to_entries[] | .key' "$config_file") + for algorithm in $algorithms; do + # Apply filter if specified + if [ -n "$filter" ]; then + if [[ ! "$algorithm" == *$filter ]]; then + continue + fi + fi + + runs=$(jq -r ".algorithms[\"${algorithm}\"][]" "$config_file" | jq -c .) 
+ + # Calculate total run number for an algorithm + mapfile -t runs_array <<< "$runs" + total_runs=${#runs_array[@]} + + # Iterate over each run for the algorithm + run_index=0 + for run in "${runs_array[@]}"; do + + echo "==================== ${algorithm} run ${run_index} ====================" + + # Extract values from the JSON + graph_name=$(echo "$run" | jq -r '.graph_name // empty') + query_name=$(echo "$run" | jq -r '.query_name // empty') + output_directory=$(echo "$run" | jq -r '.output_directory // empty') + + # Set default values if not provided + graph_name=${graph_name:-$default_graph_name} + if [ "$total_runs" -eq 1 ]; then + output_directory=${output_directory:-"$default_output_directory/${algorithm}"} + else + output_directory=${output_directory:-"$default_output_directory/${algorithm}/${run_index}"} + fi + output_directory=${output_directory/#\~/$HOME} + result_file_path="$output_directory/result.json" + duration_file_path="$output_directory/duration.txt" + memory_file_path="$output_directory/memory.txt" + + # Read the duration and memory from the files + + # Check if duration.txt exists + if [ -f "$duration_file_path" ]; then + # Read query run time in ms and convert to seconds + query_run_time_ms=$(cat "$duration_file_path") + query_run_time_sec=$(echo "scale=3; $query_run_time_ms / 1000" | bc) + else + echo "Warning: $duration_file_path not found." + continue + fi + + # Check if memory.txt exists + if [ -f "$memory_file_path" ]; then + # Extract query peak memory from memory.txt + query_peak_memory=$(grep 'max mem - min mem' "$memory_file_path" | awk -F' ' '{print $(NF-1)}') + else + echo "Warning: $memory_file_path not found." 
+ continue + fi + + # Write the data to the CSV file + echo "$algorithm,${run_index},$query_run_time_sec,$query_peak_memory" >> "$summary_file_path" + + # Increment run index + run_index=$((run_index + 1)) + done + done +} + +# Run the main function +main "$@" + diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md new file mode 100644 index 00000000..5dfa3f9f --- /dev/null +++ b/algorithms_test/ReadMe.md @@ -0,0 +1,733 @@ +# Setup +``` +algorithms_test (ALGOS-263) $ ./2_setup.sh +======================================== MyGraph ======================================== +Dropping the graph MyGraph... +Successfully dropped jobs on the graph 'MyGraph': [loading_job]. +All jobs on the graph 'MyGraph' are dropped. +Successfully dropped queries on the graph 'MyGraph': [tg_wcc, tg_shortest_ss_any_wt, tg_pagerank_pers_ap_batch, tg_tri_count_fast, tg_louvain, tg_shortest_ss_no_wt, tg_mst, tg_scc_small_world, tg_scc, tg_embedding_pairwise_cosine_similarity, tg_all_path, tg_tri_count, tg_all_path_bidirection, tg_knn_cosine_all, tg_fpm_pre, tg_common_neighbors, tg_maximal_indep_set, tg_eigenvector_cent, tg_pagerank_pers, tg_embedding_cosine_similarity, tg_pagerank_wt, tg_harmonic_cent, tg_bfs, tg_closeness_cent_approx, tg_astar, tg_degree_cent, tg_maximal_indep_set_random, tg_wcc_small_world, tg_greedy_graph_coloring, tg_betweenness_cent, tg_resource_allocation, tg_fastRP, tg_jaccard_nbor_ap_batch, tg_knn_cosine_cv_sub, tg_kmeans_sub, tg_influence_maximization_greedy, tg_max_BFS_depth, tg_lcc, tg_closeness_cent, tg_slpa, tg_adamic_adar, tg_cosine_nbor_ss, tg_cosine_nbor_ap_batch, tg_total_neighbors, tg_knn_cosine_all_sub, tg_same_community, tg_pagerank, tg_fpm, tg_kmeans, tg_map_equation, tg_estimate_diameter, tg_cycle_detection, tg_cycle_detection_batch, tg_knn_cosine_cv, tg_kcore, tg_article_rank, tg_weisfeiler_lehman, tg_jaccard_nbor_ss, tg_maxflow, tg_cycle_component, tg_weighted_degree_cent, tg_shortest_ss_pos_wt, tg_label_prop, tg_knn_cosine_ss, 
tg_shortest_ss_pos_wt_tb, tg_cycle_detection_count, tg_preferential_attachment, tg_msf, tg_influence_maximization_CELF, tmp1]. +All queries on the graph 'MyGraph' are dropped. +The graph MyGraph is dropped. +Finished dropping graph MyGraph. +-------------------------------------------------------------------------------- +Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/1_create_schema.gsql +The graph MyGraph is created. +Successfully created schema change jobs: [change_schema_of_MyGraph]. +WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. +Kick off schema change job change_schema_of_MyGraph +Doing schema change on graph 'MyGraph' (current version: 0) +Trying to add local vertex 'MyNode' to the graph 'MyGraph'. +Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph'. + +Graph MyGraph updated to new version 1 +The job change_schema_of_MyGraph completes in 0.900 seconds! +Local schema change succeeded. +Successfully dropped jobs on the graph 'MyGraph': [change_schema_of_MyGraph]. +-------------------------------------------------------------------------------- +Running: Creating loading job /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql +Using graph 'MyGraph' +Successfully created loading jobs: [loading_job]. +-------------------------------------------------------------------------------- +Running loading job for /home/tigergraph/data/public/zhishi-all/out.zhishi-all... 
+[Tip: Use "CTRL + C" to stop displaying the loading status update, then use "SHOW LOADING STATUS " to track the loading progress again] +[Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] +Running the following loading job: + Job name: loading_job + Jobid: MyGraph.loading_job.file.m1.1720513082456 + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1720513082456 +Job "MyGraph.loading_job.file.m1.1720513082456" loading status +Current timestamp is 2024-07-09 08:19:08.627 +Loading status was last updated at 2024-07-09 08:19:07.896. +[FINISHED] m1 ( Finished: 1 / Total: 1 ) + +-----------------------------------------------------------------------------------------------------+ + | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| + |zhishi-all/out.zhishi-all | 65905159 | 197715477 | 0 | 1014 kl/s | 64.97 s | 100 %| + +-----------------------------------------------------------------------------------------------------+ +LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1720513082456 + Job ID: MyGraph.loading_job.file.m1.1720513082456-----------------------------------------------------+ + Elapsed time: 65 sec + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1720513082456 + Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1720513082456/summary + +Finished running loading job for /home/tigergraph/data/public/zhishi-all/out.zhishi-all. +-------------------------------------------------------------------------------- +All queries are dropped. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/jaccard/all_pairs/tg_jaccard_nbor_ap_batch.gsql +Successfully created queries: [tg_jaccard_nbor_ap_batch]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/jaccard/single_source/tg_jaccard_nbor_ss.gsql +Successfully created queries: [tg_jaccard_nbor_ss]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/cosine/all_pairs/tg_cosine_nbor_ap_batch.gsql +Warning in query tg_cosine_nbor_ap_batch (WARN-5): line 80, col 39 +The comparison 'divisor==0' may lead to unexpected behavior because it involves +equality test between float/double numeric values. We suggest to do such +comparison with an error margin, e.g. 'abs((divisor) - (0)) < epsilon', where +epsilon is a very small positive value of your choice, such as 0.0001. +Successfully created queries: [tg_cosine_nbor_ap_batch]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/cosine/single_source/tg_cosine_nbor_ss.gsql +Successfully created queries: [tg_cosine_nbor_ss]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/preferential_attachment/tg_preferential_attachment.gsql +Successfully created queries: [tg_preferential_attachment]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/resource_allocation/tg_resource_allocation.gsql +Successfully created queries: [tg_resource_allocation]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/same_community/tg_same_community.gsql +Successfully created queries: [tg_same_community]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/adamic_adar/tg_adamic_adar.gsql +Successfully created queries: [tg_adamic_adar]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/total_neighbors/tg_total_neighbors.gsql +Successfully created queries: [tg_total_neighbors]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/common_neighbors/tg_common_neighbors.gsql +Successfully created queries: [tg_common_neighbors]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql +Successfully created queries: [tg_greedy_graph_coloring]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql +Successfully created queries: [tg_maximal_indep_set]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/maximal_independent_set/random/tg_maximal_indep_set_random.gsql +Successfully created queries: [tg_maximal_indep_set_random]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/all_pairs/tg_knn_cosine_all.gsql + +Semantic Check Error in query tg_knn_cosine_all (SEM-45): line 56, col 49 +The tuple name or the function tg_knn_cosine_all_sub is not defined. +Saved as draft query with type/semantic error: [tg_knn_cosine_all]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/all_pairs/tg_knn_cosine_all_sub.gsql +Successfully created queries: [tg_knn_cosine_all_sub]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql +Successfully created queries: [tg_knn_cosine_ss]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/cross_validation/tg_knn_cosine_cv_sub.gsql +Successfully created queries: [tg_knn_cosine_cv_sub]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/cross_validation/tg_knn_cosine_cv.gsql +Successfully created queries: [tg_knn_cosine_cv]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/EmbeddingSimilarity/pairwise/tg_embedding_pairwise_cosine_sim.gsql +Successfully created queries: [tg_embedding_pairwise_cosine_similarity]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/EmbeddingSimilarity/single_source/tg_embedding_cosine_sim.gsql +Successfully created queries: [tg_embedding_cosine_similarity]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/weisfeiler_lehman/tg_weisfeiler_lehman.gsql +Successfully created queries: [tg_weisfeiler_lehman]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/FastRP/tg_fastRP.gsql +Warning in query tg_fastRP (WARN-5): line 209, col 12 +The comparison 'mr<=p1' may lead to unexpected behavior because it involves +equality test between float/double numeric values. We suggest to do such +comparison with an error margin, e.g. 'mr<=p1 + epsilon', where epsilon is a +very small positive value of your choice, such as 0.0001. +Warning in query tg_fastRP (WARN-5): line 211, col 17 +The comparison 'mr<=p1+p2' may lead to unexpected behavior because it involves +equality test between float/double numeric values. We suggest to do such +comparison with an error margin, e.g. 'mr<=p1+p2 + epsilon', where epsilon is a +very small positive value of your choice, such as 0.0001. 
+Warning in query tg_fastRP (WARN-5): line 238, col 13 +The comparison 'square_sum==0.0' may lead to unexpected behavior because it +involves equality test between float/double numeric values. We suggest to do +such comparison with an error margin, e.g. 'abs((square_sum) - (0.0)) < +epsilon', where epsilon is a very small positive value of your choice, such as +0.0001. +Successfully created queries: [tg_fastRP]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql +Successfully created queries: [tg_slpa]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/label_propagation/tg_label_prop.gsql +Successfully created queries: [tg_label_prop]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/k_means/tg_kmeans_sub.gsql + +Type Check Error in query tg_kmeans_sub (TYP-158): line 43, col 33 +'t.embeddings' indicates no valid vertex type. +Possible reasons: + +- The expression refers to a primary_id, which is not directly +usable in the query body. To use primary_id, declare it as an +attribute. E.g "CREATE VERTEX Person (PRIMARY_ID ssn string, ssn string, age +int)" +- The expression has misspelled an attribute, or a vertex name + +Saved as draft query with type/semantic error: [tg_kmeans_sub]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/k_means/tg_kmeans.gsql + +Type Check Error in query tg_kmeans (TYP-5401): line 50, col 29 +Query 'tg_kmeans_sub' cannot be used as an expression, because it does not have +a return type. +Saved as draft query with type/semantic error: [tg_kmeans]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/triangle_counting/fast/tg_tri_count_fast.gsql +Successfully created queries: [tg_tri_count_fast]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/triangle_counting/standard/tg_tri_count.gsql +Successfully created queries: [tg_tri_count]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/strongly_connected_components/small_world/tg_scc_small_world.gsql +Successfully created queries: [tg_scc_small_world]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql +Successfully created queries: [tg_scc]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/weakly_connected_components/small_world/tg_wcc_small_world.gsql +Successfully created queries: [tg_wcc_small_world]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql +Successfully created queries: [tg_wcc]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/k_core/tg_kcore.gsql +Successfully created queries: [tg_kcore]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/map_equation/tg_map_equation.gsql +Successfully created queries: [tg_map_equation]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/local_clustering_coefficient/tg_lcc.gsql +Successfully created queries: [tg_lcc]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/louvain/tg_louvain.gsql +Warning in query tg_louvain (WARN-5): line 97, col 25 +The comparison '-t.@max_best_move.weight==t.@sum_cc_weight' may lead to +unexpected behavior because it involves equality test between float/double +numeric values. We suggest to do such comparison with an error margin, e.g. +'abs((-t.@max_best_move.weight) - (t.@sum_cc_weight)) < epsilon', where epsilon +is a very small positive value of your choice, such as 0.0001. +Warning in query tg_louvain (WARN-5): line 173, col 29 +The comparison '-s.@max_best_move.weight==s.@sum_cc_weight' may lead to +unexpected behavior because it involves equality test between float/double +numeric values. We suggest to do such comparison with an error margin, e.g. +'abs((-s.@max_best_move.weight) - (s.@sum_cc_weight)) < epsilon', where epsilon +is a very small positive value of your choice, such as 0.0001. +Successfully created queries: [tg_louvain]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_component/tg_cycle_component.gsql +Successfully created queries: [tg_cycle_component]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql +Successfully created queries: [tg_shortest_ss_no_wt]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql +Successfully created queries: [tg_shortest_ss_any_wt]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql +Warning in query tg_shortest_ss_pos_wt (WARN-5): line 102, col 20 +The comparison 's.@min_prev_path!=-1' may lead to unexpected behavior because it +involves equality test between float/double numeric values. We suggest to do +such comparison with an error margin, e.g. 'abs((s.@min_prev_path) - (-1)) > +epsilon', where epsilon is a very small positive value of your choice, such as +0.0001. +Successfully created queries: [tg_shortest_ss_pos_wt]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql +Warning in query tg_shortest_ss_pos_wt_tb (WARN-5): line 123, col 20 +The comparison 's.@min_prev_min_path!=-1' may lead to unexpected behavior +because it involves equality test between float/double numeric values. We +suggest to do such comparison with an error margin, e.g. +'abs((s.@min_prev_min_path) - (-1)) > epsilon', where epsilon is a very small +positive value of your choice, such as 0.0001. +Successfully created queries: [tg_shortest_ss_pos_wt_tb]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_detection/count/tg_cycle_detection_count.gsql +Successfully created queries: [tg_cycle_detection_count]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_detection/full_result/standard/tg_cycle_detection.gsql +Successfully created queries: [tg_cycle_detection]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_detection/full_result/batch/tg_cycle_detection_batch.gsql +Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 37 +unsatisfiable pattern e_type_set +Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 37 +unsatisfiable pattern e_type_set +Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 35 +unsatisfiable pattern -(e_type_set:e)- :t +Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 35 +unsatisfiable pattern -(e_type_set:e)- :t +Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 26 +unsatisfiable pattern Active:s -(e_type_set:e)- :t + +Type Check Error in query tg_cycle_detection_batch (TYP-8029): line 61, col 21 +the pattern "Active:s -(e_type_set:e)- :t" has an undirected edge, but the graph +does not contain any undirected edges + +Saved as draft query with type/semantic error: [tg_cycle_detection_batch]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/estimated_diameter/approximate/tg_estimate_diameter.gsql + +Semantic Check Error in query tg_estimate_diameter (SEM-45): line 52, col 34 +The tuple name or the function tg_max_BFS_depth is not defined. +Saved as draft query with type/semantic error: [tg_estimate_diameter]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/estimated_diameter/max_bfs/tg_max_BFS_depth.gsql +Successfully created queries: [tg_max_BFS_depth]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/astar_shortest_path/tg_astar.gsql +Successfully created queries: [tg_astar]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql +Successfully created queries: [tg_all_path]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/path_between_two_vertices/bidirection/tg_all_path_bidirection.gsql +Successfully created queries: [tg_all_path_bidirection]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/minimum_spanning_forest/tg_msf.gsql +Successfully created queries: [tg_msf]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/minimum_spanning_tree/tg_mst.gsql +Successfully created queries: [tg_mst]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/bfs/tg_bfs.gsql +Successfully created queries: [tg_bfs]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/maxflow/tg_maxflow.gsql +Warning in query tg_maxflow (WARN-5): line 129, col 25 +The comparison 'fl-@@group_by_flow_accum.get(s,t).flow>=@@max_cap_threshold' may +lead to unexpected behavior because it involves equality test between +float/double numeric values. We suggest to do such comparison with an error +margin, e.g. 'fl-@@group_by_flow_accum.get(s,t).flow>=@@max_cap_threshold - +epsilon', where epsilon is a very small positive value of your choice, such as +0.0001. +Warning in query tg_maxflow (WARN-5): line 140, col 25 +The comparison '@@group_by_flow_accum.get(t,s).flow>=@@max_cap_threshold' may +lead to unexpected behavior because it involves equality test between +float/double numeric values. We suggest to do such comparison with an error +margin, e.g. '@@group_by_flow_accum.get(t,s).flow>=@@max_cap_threshold - +epsilon', where epsilon is a very small positive value of your choice, such as +0.0001. +Warning in query tg_maxflow (WARN-5): line 211, col 20 +The comparison '@@group_by_flow_accum.get(s,t).flow>=min_flow_threshhold' may +lead to unexpected behavior because it involves equality test between +float/double numeric values. 
We suggest to do such comparison with an error +margin, e.g. '@@group_by_flow_accum.get(s,t).flow>=min_flow_threshhold - +epsilon', where epsilon is a very small positive value of your choice, such as +0.0001. +Successfully created queries: [tg_maxflow]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/betweenness/tg_betweenness_cent.gsql +Successfully created queries: [tg_betweenness_cent]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql +Successfully created queries: [tg_closeness_cent_approx]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/closeness/exact/tg_closeness_cent.gsql +Successfully created queries: [tg_closeness_cent]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql +Warning in query tg_eigenvector_cent (WARN-5): line 85, col 19 +The comparison 's.@sum_eigen_value==1.0' may lead to unexpected behavior because +it involves equality test between float/double numeric values. We suggest to do +such comparison with an error margin, e.g. 'abs((s.@sum_eigen_value) - (1.0)) < +epsilon', where epsilon is a very small positive value of your choice, such as +0.0001. +Successfully created queries: [tg_eigenvector_cent]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql +Successfully created queries: [tg_influence_maximization_greedy]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql +Warning in query tg_influence_maximization_CELF (WARN-5): line 83, col 19 +The comparison 's.@influence_value>=score' may lead to unexpected behavior +because it involves equality test between float/double numeric values. We +suggest to do such comparison with an error margin, e.g. +'s.@influence_value>=score - epsilon', where epsilon is a very small positive +value of your choice, such as 0.0001. +Successfully created queries: [tg_influence_maximization_CELF]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql +Successfully created queries: [tg_degree_cent]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql +Successfully created queries: [tg_weighted_degree_cent]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql +Successfully created queries: [tg_pagerank]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql +Successfully created queries: [tg_pagerank_wt]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql +Successfully created queries: [tg_pagerank_pers]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql +Successfully created queries: [tg_pagerank_pers_ap_batch]. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/harmonic/tg_harmonic_cent.gsql +Successfully created queries: [tg_harmonic_cent]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/article_rank/tg_article_rank.gsql +Successfully created queries: [tg_article_rank]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Patterns/frequent_pattern_mining/tg_fpm_pre.gsql + +Type Check Error in query tg_fpm_pre (TYP-158): line 47, col 12 +'s.item_list' indicates no valid vertex type. +Possible reasons: + +- The expression refers to a primary_id, which is not directly +usable in the query body. To use primary_id, declare it as an +attribute. E.g "CREATE VERTEX Person (PRIMARY_ID ssn string, ssn string, age +int)" +- The expression has misspelled an attribute, or a vertex name + +Saved as draft query with type/semantic error: [tg_fpm_pre]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Patterns/frequent_pattern_mining/tg_fpm.gsql +Warning in query tg_fpm (WARN-7): line 151, col 9 +POST-ACCUM clauses binding to multiple aliases ([s, t]) will be deprecated soon. +Please separate into 2 POST-ACCUM clauses, one for each alias. +Warning in query tg_fpm (WARN-7): line 151, col 9 +POST-ACCUM clauses binding to multiple aliases ([s, t]) will be deprecated soon. +Please separate into 2 POST-ACCUM clauses, one for each alias. + +Type Check Error in query tg_fpm (TYP-158): line 80, col 33 +'s.item_list' indicates no valid vertex type. +Possible reasons: + +- The expression refers to a primary_id, which is not directly +usable in the query body. To use primary_id, declare it as an +attribute. E.g "CREATE VERTEX Person (PRIMARY_ID ssn string, ssn string, age +int)" +- The expression has misspelled an attribute, or a vertex name + +Saved as draft query with type/semantic error: [tg_fpm]. 
+Installing queries for graph: MyGraph +Start installing queries, about 1 minute ... +tg_tri_count_fast query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_tri_count_fast?v_type=VALUE&e_type=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_tri_count query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_tri_count?v_type=VALUE&e_type=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_all_path query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_all_path?v_source=VALUE&v_source.type=VERTEX_TYPE&target_v=VALUE&target_v.type=VERTEX_TYPE&[depth=VALUE]&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_embedding_pairwise_cosine_similarity query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_embedding_pairwise_cosine_similarity?v1=VALUE&v1.type=VERTEX_TYPE&v2=VALUE&v2.type=VERTEX_TYPE&embedding_dimension=VALUE&embedding_attribute=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_common_neighbors query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_common_neighbors?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_wcc query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_wcc?v_type_set=VALUE&e_type_set=VALUE&[print_limit=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_shortest_ss_no_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_no_wt?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&[print_limit=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_maximal_indep_set query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_maximal_indep_set?v_type=VALUE&e_type=VALUE&[maximum_iteration=VALUE]&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_scc_small_world query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_scc_small_world?v_type=VALUE&e_type=VALUE&reverse_e_type=VALUE&[threshold=VALUE]&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_all_path_bidirection query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_all_path_bidirection?v_source=VALUE&v_source.type=VERTEX_TYPE&target_v=VALUE&target_v.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&[depth=VALUE]&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_pagerank_pers_ap_batch query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank_pers_ap_batch?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&batch_num=VALUE&print_results=VALUE&file_path=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_embedding_cosine_similarity query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_embedding_cosine_similarity?v1=VALUE&v1.type=VERTEX_TYPE&vert_types=VALUE&embedding_dimension=VALUE&k=VALUE&embedding_attribute=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_eigenvector_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_eigenvector_cent?v_type_set=VALUE&e_type_set=VALUE&[maximum_iteration=VALUE]&[conv_limit=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_pagerank_pers query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank_pers?source[INDEX]=VALUE&source[INDEX].type=VERTEX_TYPE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_mst query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_mst?opt_source=VALUE&opt_source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[maximum_iteration=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_bfs query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_bfs?v_type_set=VALUE&e_type_set=VALUE&[max_hops=VALUE]&v_start=VALUE&v_start.type=VERTEX_TYPE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_pagerank_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank_wt?v_type=VALUE&e_type=VALUE&weight_attribute=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_degree_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_degree_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&[in_degree=VALUE]&[out_degree=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_shortest_ss_any_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_any_wt?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[print_limit=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_resource_allocation query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_resource_allocation?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_greedy_graph_coloring query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_greedy_graph_coloring?v_type_set=VALUE&e_type_set=VALUE&[max_colors=VALUE]&[print_color_count=VALUE]&[print_stats=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_wcc_small_world query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_wcc_small_world?v_type=VALUE&e_type=VALUE&[threshold=VALUE]&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_scc query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_scc?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&top_k_dist=VALUE&print_limit=VALUE&[maximum_iteration=VALUE]&[iter_wcc=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_maximal_indep_set_random query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_maximal_indep_set_random?v_type_set=VALUE&e_type_set=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_max_BFS_depth query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_max_BFS_depth?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_harmonic_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_harmonic_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&[max_hops=VALUE]&[top_k=VALUE]&[wf=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_adamic_adar query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_adamic_adar?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_louvain query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_louvain?v_type_set=VALUE&e_type_set=VALUE&[weight_attribute=VALUE]&[maximum_iteration=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[print_stats=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_influence_maximization_greedy query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_influence_maximization_greedy?v_type=VALUE&e_type=VALUE&weight_attribute=VALUE&top_k=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_jaccard_nbor_ap_batch query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_jaccard_nbor_ap_batch?[top_k=VALUE]&v_type_set=VALUE&feat_v_type=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&similarity_edge=VALUE&[src_batch_num=VALUE]&[nbor_batch_num=VALUE]&[print_results=VALUE]&[print_limit=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_total_neighbors query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_total_neighbors?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_lcc query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_lcc?v_type=VALUE&e_type=VALUE&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_knn_cosine_cv_sub query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_cv_sub?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&v_label=VALUE&weight_attribute=VALUE&max_k=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_betweenness_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_betweenness_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type=VALUE&[max_hops=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_cosine_nbor_ss query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cosine_nbor_ss?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&top_k=VALUE&print_limit=VALUE&[print_results=VALUE]&[file_path=VALUE]&[similarity_edge=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_closeness_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_closeness_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type=VALUE&[max_hops=VALUE]&[top_k=VALUE]&[wf=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_knn_cosine_all_sub query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_all_sub?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&label=VALUE&top_k=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_slpa query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_slpa?v_type_set=VALUE&e_type_set=VALUE&threshold=VALUE&maximum_iteration=VALUE&print_limit=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_pagerank query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_cycle_detection query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cycle_detection?v_type_set=VALUE&e_type_set=VALUE&depth=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_cosine_nbor_ap_batch query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cosine_nbor_ap_batch?v_type=VALUE&e_type=VALUE&edge_attribute=VALUE&top_k=VALUE&[print_results=VALUE]&file_path=VALUE&similarity_edge=VALUE&[num_of_batches=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_astar query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_astar?source_vertex=VALUE&source_vertex.type=VERTEX_TYPE&target_vertex=VALUE&target_vertex.type=VERTEX_TYPE&e_type_set=VALUE&weight_type=VALUE&latitude=VALUE&longitude=VALUE&weight_attribute=VALUE&[print_stats=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_same_community query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_same_community?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&community_attribute=VALUE&community_attr_type=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_weisfeiler_lehman query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_weisfeiler_lehman?v_type=VALUE&e_type=VALUE&DEPTH=VALUE&print_limit=VALUE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_cycle_component query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cycle_component?v_type=VALUE&e_type=VALUE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_map_equation query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_map_equation?v_type=VALUE&e_type=VALUE&result_attribute=VALUE&[weight_attribute=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_knn_cosine_cv query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_cv?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&label=VALUE&min_k=VALUE&max_k=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_jaccard_nbor_ss query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_jaccard_nbor_ss?source=VALUE&source.type=VERTEX_TYPE&e_type=VALUE&reverse_e_type=VALUE&[top_k=VALUE]&[print_results=VALUE]&[similarity_edge_type=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_article_rank query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_article_rank?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_weighted_degree_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_weighted_degree_cent?v_type=VALUE&e_type=VALUE&reverse_e_type=VALUE&weight_attribute=VALUE&[in_degree=VALUE]&[out_degree=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_preferential_attachment query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_preferential_attachment?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_label_prop query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_label_prop?v_type_set=VALUE&e_type_set=VALUE&maximum_iteration=VALUE&print_limit=VALUE&[print_results=VALUE]&[file_path=VALUE]&[result_attribute=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_shortest_ss_pos_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_pos_wt?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[epsilon=VALUE]&[print_results=VALUE]&[print_limit=VALUE]&[display_edges=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_kcore query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_kcore?v_type=VALUE&e_type=VALUE&[k_min=VALUE]&[k_max=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[print_all_k=VALUE]&[show_shells=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_cycle_detection_count query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cycle_detection_count?v_type_set=VALUE&e_type_set=VALUE&depth=VALUE&batches=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_closeness_cent_approx query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_closeness_cent_approx?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type=VALUE&[top_k=VALUE]&[k=VALUE]&[max_hops=VALUE]&[epsilon=VALUE]&[print_results=VALUE]&[file_path=VALUE]&[debug=VALUE]&[sample_index=VALUE]&[max_size=VALUE]&[wf=VALUE]'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_knn_cosine_ss query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_ss?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&label=VALUE&top_k=VALUE&[print_results=VALUE]&[file_path=VALUE]&[result_attribute=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_fastRP query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_fastRP?v_type_set=VALUE&e_type_set=VALUE&output_v_type_set=VALUE&iteration_weights=VALUE&beta=VALUE&embedding_dimension=VALUE&[default_index=VALUE]&default_length=VALUE&[default_weight=VALUE]&embedding_dim_map=VALUE&[sampling_constant=VALUE]&[random_seed=VALUE]&[result_attribute=VALUE]&[component_attribute=VALUE]&[batch_number=VALUE]&[filepath=VALUE]&[print_results=VALUE]&[choose_k=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_influence_maximization_CELF query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_influence_maximization_CELF?v_type=VALUE&e_type=VALUE&weight_attribute=VALUE&top_k=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_shortest_ss_pos_wt_tb query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_pos_wt_tb?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[epsilon=VALUE]&[print_results=VALUE]&[print_limit=VALUE]&[display_edges=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[write_size=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
+tg_maxflow query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_maxflow?source=VALUE&source.type=VERTEX_TYPE&sink=VALUE&sink.type=VERTEX_TYPE&v_type=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&cap_attr=VALUE&cap_type=VALUE&[min_flow_threshhold=VALUE]&[print_results=VALUE]&[display_edges=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_msf query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_msf?v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +Select 'm1' as compile server, now connecting ... +Node 'm1' is prepared as compile server. + +[======================================================================================================] 100% (62/62) +Query installation finished. +======================================== MyGraph2 ======================================== +Dropping the graph MyGraph2... +Successfully dropped jobs on the graph 'MyGraph2': [loading_job]. +All jobs on the graph 'MyGraph2' are dropped. +All queries are dropped. +The graph MyGraph2 is dropped. +Finished dropping graph MyGraph2. +-------------------------------------------------------------------------------- +Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph2/1_create_schema.gsql +The graph MyGraph2 is created. +Successfully created schema change jobs: [change_schema_of_MyGraph]. +WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. 
+Kick off schema change job change_schema_of_MyGraph +Doing schema change on graph 'MyGraph2' (current version: 0) +Trying to add local vertex 'MyNode' to the graph 'MyGraph2'. +Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph2'. + +Graph MyGraph2 updated to new version 1 +The job change_schema_of_MyGraph completes in 0.933 seconds! +Local schema change succeeded. +Successfully dropped jobs on the graph 'MyGraph2': [change_schema_of_MyGraph]. +-------------------------------------------------------------------------------- +Running: Creating loading job /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql +Using graph 'MyGraph2' +Successfully created loading jobs: [loading_job]. +-------------------------------------------------------------------------------- +Running loading job for /home/tigergraph/mydata/northeast_usa/out.dimacs9-NE... +[Tip: Use "CTRL + C" to stop displaying the loading status update, then use "SHOW LOADING STATUS " to track the loading progress again] +[Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] +Running the following loading job: + Job name: loading_job + Jobid: MyGraph2.loading_job.file.m1.1720513297218 + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph2.loading_job.file.m1.1720513297218 +Job "MyGraph2.loading_job.file.m1.1720513297218" loading status +Current timestamp is 2024-07-09 08:21:42.475 +Loading status was last updated at 2024-07-09 08:21:42.261. 
+[FINISHED] m1 ( Finished: 1 / Total: 1 ) + +-------------------------------------------------------------------------------------------------------+ + | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| + |northeast_usa/out.dimacs9-NE | 3868020 | 11604060 | 0 | 804 kl/s | 4.81 s | 100 %| + +-------------------------------------------------------------------------------------------------------+ +LOAD SUCCESSFUL for loading jobid: MyGraph2.loading_job.file.m1.1720513297218 + Job ID: MyGraph2.loading_job.file.m1.1720513297218------------------------------------------------------+ + Elapsed time: 5 sec + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph2.loading_job.file.m1.1720513297218 + Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph2.loading_job.file.m1.1720513297218/summary + +Finished running loading job for /home/tigergraph/mydata/northeast_usa/out.dimacs9-NE. +-------------------------------------------------------------------------------- +No queries to install for graph: MyGraph2 +algorithms_test (ALGOS-263) $ +algorithms_test (ALGOS-263) $ ll +total 40 +-rwxr-xr-x 1 tigergraph tigergraph 2369 Jul 5 09:40 1_dataset.sh +-rwxr-xr-x 1 tigergraph tigergraph 6042 Jul 8 06:24 2_setup.sh +-rwxr-xr-x 1 tigergraph tigergraph 5213 Jul 8 09:08 3_run.sh +drwxrwxr-x 2 tigergraph tigergraph 4096 Jul 4 08:34 baseline +drwxr-xr-x 2 tigergraph tigergraph 4096 Jul 9 08:16 config +drwxrwxr-x 4 tigergraph tigergraph 4096 Jul 8 02:24 gsql +drwxr-xr-x 2 tigergraph tigergraph 4096 Jul 8 03:34 mem +-rw-rw-r-- 1 tigergraph tigergraph 0 Jul 4 08:38 ReadMe.md +drwxr-xr-x 2 tigergraph tigergraph 4096 Jul 8 06:53 tools +algorithms_test (ALGOS-263) $ +``` + +# Run +``` +algorithms_test (ALGOS-263) $ ./3_run.sh +==================== topological_link_prediction/tg_preferential_attachment run 0 ==================== +Starting curl command for query: tg_preferential_attachment on graph: MyGraph +Result has been written to 
/home/tigergraph/data/algos/topological_link_prediction/tg_preferential_attachment/result.json +Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_preferential_attachment/duration.txt +Finished curl command for query: tg_preferential_attachment on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_preferential_attachment/memory.txt +==================== topological_link_prediction/tg_resource_allocation run 0 ==================== +Starting curl command for query: tg_resource_allocation on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_resource_allocation/result.json +Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_resource_allocation/duration.txt +Finished curl command for query: tg_resource_allocation on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_resource_allocation/memory.txt +==================== topological_link_prediction/tg_same_community run 0 ==================== +Starting curl command for query: tg_same_community on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_same_community/result.json +Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_same_community/duration.txt +Finished curl command for query: tg_same_community on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_same_community/memory.txt +==================== topological_link_prediction/tg_adamic_adar run 0 ==================== +Starting curl command for query: tg_adamic_adar on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_adamic_adar/result.json +Duration has been written to 
/home/tigergraph/data/algos/topological_link_prediction/tg_adamic_adar/duration.txt +Finished curl command for query: tg_adamic_adar on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_adamic_adar/memory.txt +==================== topological_link_prediction/tg_total_neighbors run 0 ==================== +Starting curl command for query: tg_total_neighbors on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_total_neighbors/result.json +Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_total_neighbors/duration.txt +Finished curl command for query: tg_total_neighbors on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_total_neighbors/memory.txt +==================== classification/tg_greedy_graph_coloring run 0 ==================== +Starting curl command for query: tg_greedy_graph_coloring on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16777220.RESTPP_1_1.1720513538258.N) for details. Try increase header GSQL-TIMEOUT value. 
+Result has been written to /home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/result.json +Duration has been written to /home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/duration.txt +Finished curl command for query: tg_greedy_graph_coloring on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/memory.txt +==================== classification/tg_maximal_indep_set run 0 ==================== +Starting curl command for query: tg_maximal_indep_set on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/classification/tg_maximal_indep_set/result.json +Duration has been written to /home/tigergraph/data/algos/classification/tg_maximal_indep_set/duration.txt +Finished curl command for query: tg_maximal_indep_set on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/classification/tg_maximal_indep_set/memory.txt +==================== classification/tg_knn_cosine_ss run 0 ==================== +Starting curl command for query: tg_knn_cosine_ss on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_ss/result.json +Duration has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_ss/duration.txt +Finished curl command for query: tg_knn_cosine_ss on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_ss/memory.txt +==================== classification/tg_knn_cosine_cv run 0 ==================== +Starting curl command for query: tg_knn_cosine_cv on graph: MyGraph +Error: Runtime Error: divider is zero. 
+Result has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_cv/result.json +Duration has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_cv/duration.txt +Finished curl command for query: tg_knn_cosine_cv on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_cv/memory.txt +==================== community/tg_slpa run 0 ==================== +Starting curl command for query: tg_slpa on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16777225.RESTPP_1_1.1720514490906.N) for details. Try increase header GSQL-TIMEOUT value. +Result has been written to /home/tigergraph/data/algos/community/tg_slpa/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_slpa/duration.txt +Finished curl command for query: tg_slpa on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_slpa/memory.txt +==================== community/tg_label_prop run 0 ==================== +Starting curl command for query: tg_label_prop on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_label_prop/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_label_prop/duration.txt +Finished curl command for query: tg_label_prop on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_label_prop/memory.txt +==================== community/tg_tri_count_fast run 0 ==================== +Starting curl command for query: tg_tri_count_fast on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_tri_count_fast/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_tri_count_fast/duration.txt +Finished curl command for query: tg_tri_count_fast on graph: MyGraph +Peak 
memory has been written to /home/tigergraph/data/algos/community/tg_tri_count_fast/memory.txt +==================== community/tg_tri_count run 0 ==================== +Starting curl command for query: tg_tri_count on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_tri_count/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_tri_count/duration.txt +Finished curl command for query: tg_tri_count on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_tri_count/memory.txt +==================== community/tg_scc run 0 ==================== +Starting curl command for query: tg_scc on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_scc/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_scc/duration.txt +Finished curl command for query: tg_scc on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_scc/memory.txt +==================== community/tg_scc_small_world run 0 ==================== +Starting curl command for query: tg_scc_small_world on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_scc_small_world/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_scc_small_world/duration.txt +Finished curl command for query: tg_scc_small_world on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_scc_small_world/memory.txt +==================== community/tg_wcc run 0 ==================== +Starting curl command for query: tg_wcc on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_wcc/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_wcc/duration.txt +Finished curl command for query: tg_wcc on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_wcc/memory.txt 
+==================== community/tg_wcc_small_world run 0 ==================== +Starting curl command for query: tg_wcc_small_world on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/duration.txt +Finished curl command for query: tg_wcc_small_world on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/memory.txt +==================== community/tg_kcore run 0 ==================== +Starting curl command for query: tg_kcore on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_kcore/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_kcore/duration.txt +Finished curl command for query: tg_kcore on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_kcore/memory.txt +==================== community/tg_map_equation run 0 ==================== +Starting curl command for query: tg_map_equation on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_map_equation/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_map_equation/duration.txt +Finished curl command for query: tg_map_equation on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_map_equation/memory.txt +==================== community/tg_lcc run 0 ==================== +Starting curl command for query: tg_lcc on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_lcc/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_lcc/duration.txt +Finished curl command for query: tg_lcc on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_lcc/memory.txt +==================== community/tg_louvain run 0 ==================== 
+Starting curl command for query: tg_louvain on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_louvain/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_louvain/duration.txt +Finished curl command for query: tg_louvain on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_louvain/memory.txt +==================== path/tg_cycle_component run 0 ==================== +Starting curl command for query: tg_cycle_component on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/path/tg_cycle_component/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_cycle_component/duration.txt +Finished curl command for query: tg_cycle_component on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_cycle_component/memory.txt +==================== path/tg_shortest_ss_no_wt run 0 ==================== +Starting curl command for query: tg_shortest_ss_no_wt on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/duration.txt +Finished curl command for query: tg_shortest_ss_no_wt on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/memory.txt +==================== path/tg_shortest_ss_any_wt run 0 ==================== +Starting curl command for query: tg_shortest_ss_any_wt on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16777235.RESTPP_1_1.1720516010379.N) for details. Try increase header GSQL-TIMEOUT value. 
+Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/duration.txt +Finished curl command for query: tg_shortest_ss_any_wt on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/memory.txt +==================== path/tg_shortest_ss_pos_wt run 0 ==================== +Starting curl command for query: tg_shortest_ss_pos_wt on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/duration.txt +Finished curl command for query: tg_shortest_ss_pos_wt on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/memory.txt +==================== path/tg_shortest_ss_pos_wt_tb run 0 ==================== +Starting curl command for query: tg_shortest_ss_pos_wt_tb on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/duration.txt +Finished curl command for query: tg_shortest_ss_pos_wt_tb on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/memory.txt +==================== path/tg_all_path run 0 ==================== +Starting curl command for query: tg_all_path on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16973847.RESTPP_1_1.1720516966396.N) for details. Try increase header GSQL-TIMEOUT value. 
+Result has been written to /home/tigergraph/data/algos/path/tg_all_path/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_all_path/duration.txt +Finished curl command for query: tg_all_path on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_all_path/memory.txt +==================== path/tg_msf run 0 ==================== +Starting curl command for query: tg_msf on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16973851.RESTPP_1_1.1720517867862.N) for details. Try increase header GSQL-TIMEOUT value. +Result has been written to /home/tigergraph/data/algos/path/tg_msf/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_msf/duration.txt +Finished curl command for query: tg_msf on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_msf/memory.txt +==================== path/tg_mst run 0 ==================== +Starting curl command for query: tg_mst on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/path/tg_mst/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_mst/duration.txt +Finished curl command for query: tg_mst on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_mst/memory.txt +==================== path/tg_bfs run 0 ==================== +Starting curl command for query: tg_bfs on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/path/tg_bfs/result.json +Duration has been written to /home/tigergraph/data/algos/path/tg_bfs/duration.txt +Finished curl command for query: tg_bfs on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/path/tg_bfs/memory.txt +==================== centrality/tg_closeness_cent_approx run 0 ==================== +Starting curl command for query: 
tg_closeness_cent_approx on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16842784.RESTPP_1_1.1720518776855.N) for details. Try increase header GSQL-TIMEOUT value. +Result has been written to /home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/duration.txt +Finished curl command for query: tg_closeness_cent_approx on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/memory.txt +==================== centrality/tg_eigenvector_cent run 0 ==================== +Starting curl command for query: tg_eigenvector_cent on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_eigenvector_cent/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_eigenvector_cent/duration.txt +Finished curl command for query: tg_eigenvector_cent on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_eigenvector_cent/memory.txt +==================== centrality/tg_influence_maximization_greedy run 0 ==================== +Starting curl command for query: tg_influence_maximization_greedy on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/duration.txt +Finished curl command for query: tg_influence_maximization_greedy on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/memory.txt +==================== centrality/tg_influence_maximization_CELF run 0 ==================== +Starting curl command for query: tg_influence_maximization_CELF on 
graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/duration.txt +Finished curl command for query: tg_influence_maximization_CELF on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/memory.txt +==================== centrality/tg_degree_cent run 0 ==================== +Starting curl command for query: tg_degree_cent on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_degree_cent/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_degree_cent/duration.txt +Finished curl command for query: tg_degree_cent on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_degree_cent/memory.txt +==================== centrality/tg_weighted_degree_cent run 0 ==================== +Starting curl command for query: tg_weighted_degree_cent on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/duration.txt +Finished curl command for query: tg_weighted_degree_cent on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/memory.txt +==================== centrality/tg_pagerank run 0 ==================== +Starting curl command for query: tg_pagerank on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank/duration.txt +Finished curl command for query: tg_pagerank on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank/memory.txt +==================== 
centrality/tg_pagerank_wt run 0 ==================== +Starting curl command for query: tg_pagerank_wt on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_wt/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_wt/duration.txt +Finished curl command for query: tg_pagerank_wt on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_wt/memory.txt +==================== centrality/tg_pagerank_pers run 0 ==================== +Starting curl command for query: tg_pagerank_pers on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers/duration.txt +Finished curl command for query: tg_pagerank_pers on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers/memory.txt +==================== centrality/tg_pagerank_pers_ap_batch run 0 ==================== +Starting curl command for query: tg_pagerank_pers_ap_batch on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16842793.RESTPP_1_1.1720520220984.N) for details. Try increase header GSQL-TIMEOUT value. 
+Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/duration.txt +Finished curl command for query: tg_pagerank_pers_ap_batch on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/memory.txt +==================== centrality/tg_harmonic_cent run 0 ==================== +Starting curl command for query: tg_harmonic_cent on graph: MyGraph +Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16842797.RESTPP_1_1.1720521122397.N) for details. Try increase header GSQL-TIMEOUT value. +Result has been written to /home/tigergraph/data/algos/centrality/tg_harmonic_cent/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_harmonic_cent/duration.txt +Finished curl command for query: tg_harmonic_cent on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_harmonic_cent/memory.txt +==================== centrality/tg_article_rank run 0 ==================== +Starting curl command for query: tg_article_rank on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/centrality/tg_article_rank/result.json +Duration has been written to /home/tigergraph/data/algos/centrality/tg_article_rank/duration.txt +Finished curl command for query: tg_article_rank on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_article_rank/memory.txt +``` diff --git a/algorithms_test/config/1_dataset.json b/algorithms_test/config/1_dataset.json new file mode 100644 index 00000000..e0991551 --- /dev/null +++ b/algorithms_test/config/1_dataset.json @@ -0,0 +1,16 @@ +{ + "general_settings": { + "default_directory": "~/data/public" + }, + "datasets": { + "Zhishi": { + 
"download_link": "http://konect.cc/files/download.tsv.zhishi-all.tar.bz2", + "top_level_dir": "zhishi-all" + }, + "Northeast USA": { + "download_link": "http://konect.cc/files/download.tsv.dimacs9-NE.tar.bz2", + "directory": "~/mydata", + "top_level_dir": "northeast_usa" + } + } +} diff --git a/algorithms_test/config/2_setup.json b/algorithms_test/config/2_setup.json new file mode 100644 index 00000000..1edaa1e4 --- /dev/null +++ b/algorithms_test/config/2_setup.json @@ -0,0 +1,92 @@ +{ + "tigergraph": { + "user_name": "tigergraph", + "password": "tigergraph" + }, + "execution_steps": { + "drop_graph": true, + "create_schema": true, + "create_loading_job": true, + "run_loading_job": true, + "install_queries":true + }, + "graphs": { + "MyGraph": { + "file_path": "~/data/public/zhishi-all/out.zhishi-all", + "queries_to_install": [ + "algorithms/Similarity/jaccard/all_pairs/tg_jaccard_nbor_ap_batch.gsql", + "algorithms/Similarity/jaccard/single_source/tg_jaccard_nbor_ss.gsql", + "algorithms/Similarity/cosine/all_pairs/tg_cosine_nbor_ap_batch.gsql", + "algorithms/Similarity/cosine/single_source/tg_cosine_nbor_ss.gsql", + "algorithms/Topological Link Prediction/preferential_attachment/tg_preferential_attachment.gsql", + "algorithms/Topological Link Prediction/resource_allocation/tg_resource_allocation.gsql", + "algorithms/Topological Link Prediction/same_community/tg_same_community.gsql", + "algorithms/Topological Link Prediction/adamic_adar/tg_adamic_adar.gsql", + "algorithms/Topological Link Prediction/total_neighbors/tg_total_neighbors.gsql", + "algorithms/Topological Link Prediction/common_neighbors/tg_common_neighbors.gsql", + "algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql", + "algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql", + "algorithms/Classification/maximal_independent_set/random/tg_maximal_indep_set_random.gsql", + 
"algorithms/Classification/k_nearest_neighbors/all_pairs/tg_knn_cosine_all.gsql", + "algorithms/Classification/k_nearest_neighbors/all_pairs/tg_knn_cosine_all_sub.gsql", + "algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql", + "algorithms/Classification/k_nearest_neighbors/cross_validation/tg_knn_cosine_cv_sub.gsql", + "algorithms/Classification/k_nearest_neighbors/cross_validation/tg_knn_cosine_cv.gsql", + "algorithms/GraphML/Embeddings/EmbeddingSimilarity/pairwise/tg_embedding_pairwise_cosine_sim.gsql", + "algorithms/GraphML/Embeddings/EmbeddingSimilarity/single_source/tg_embedding_cosine_sim.gsql", + "algorithms/GraphML/Embeddings/weisfeiler_lehman/tg_weisfeiler_lehman.gsql", + "algorithms/GraphML/Embeddings/FastRP/tg_fastRP.gsql", + "algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql", + "algorithms/Community/label_propagation/tg_label_prop.gsql", + "algorithms/Community/k_means/tg_kmeans_sub.gsql", + "algorithms/Community/k_means/tg_kmeans.gsql", + "algorithms/Community/triangle_counting/fast/tg_tri_count_fast.gsql", + "algorithms/Community/triangle_counting/standard/tg_tri_count.gsql", + "algorithms/Community/connected_components/strongly_connected_components/small_world/tg_scc_small_world.gsql", + "algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql", + "algorithms/Community/connected_components/weakly_connected_components/small_world/tg_wcc_small_world.gsql", + "algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql", + "algorithms/Community/k_core/tg_kcore.gsql", + "algorithms/Community/map_equation/tg_map_equation.gsql", + "algorithms/Community/local_clustering_coefficient/tg_lcc.gsql", + "algorithms/Community/louvain/tg_louvain.gsql", + "algorithms/Path/cycle_component/tg_cycle_component.gsql", + "algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql", + 
"algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql", + "algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql", + "algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql", + "algorithms/Path/cycle_detection/count/tg_cycle_detection_count.gsql", + "algorithms/Path/cycle_detection/full_result/standard/tg_cycle_detection.gsql", + "algorithms/Path/cycle_detection/full_result/batch/tg_cycle_detection_batch.gsql", + "algorithms/Path/estimated_diameter/approximate/tg_estimate_diameter.gsql", + "algorithms/Path/estimated_diameter/max_bfs/tg_max_BFS_depth.gsql", + "algorithms/Path/astar_shortest_path/tg_astar.gsql", + "algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql", + "algorithms/Path/path_between_two_vertices/bidirection/tg_all_path_bidirection.gsql", + "algorithms/Path/minimum_spanning_forest/tg_msf.gsql", + "algorithms/Path/minimum_spanning_tree/tg_mst.gsql", + "algorithms/Path/bfs/tg_bfs.gsql", + "algorithms/Path/maxflow/tg_maxflow.gsql", + "algorithms/Centrality/betweenness/tg_betweenness_cent.gsql", + "algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql", + "algorithms/Centrality/closeness/exact/tg_closeness_cent.gsql", + "algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql", + "algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql", + "algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql", + "algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql", + "algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql", + "algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql", + "algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql", + "algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql", + "algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql", + 
"algorithms/Centrality/harmonic/tg_harmonic_cent.gsql", + "algorithms/Centrality/article_rank/tg_article_rank.gsql", + "algorithms/Patterns/frequent_pattern_mining/tg_fpm_pre.gsql", + "algorithms/Patterns/frequent_pattern_mining/tg_fpm.gsql" + ] + }, + "MyGraph2": { + "file_path": "~/mydata/northeast_usa/out.dimacs9-NE" + } + } +} diff --git a/algorithms_test/config/3_run.json b/algorithms_test/config/3_run.json new file mode 100644 index 00000000..ed3cc5f8 --- /dev/null +++ b/algorithms_test/config/3_run.json @@ -0,0 +1,621 @@ +{ + "general_settings": { + "default_graph_name": "MyGraph", + "default_timeout_in_minutes": "15", + "default_output_directory": "~/data/algos", + "summary_file_path": "~/data/algos/summary.csv" + }, + "algorithms": { + "topological_link_prediction/tg_preferential_attachment": [ + { + "query_name": "tg_preferential_attachment", + "parameters": { + "v_source": "117001", + "v_source.type": "MyNode", + "v_target": "135375", + "v_target.type": "MyNode", + "e_type_set": "MyEdge" + } + } + ], + "topological_link_prediction/tg_resource_allocation": [ + { + "query_name": "tg_resource_allocation", + "parameters": { + "v_source": "117001", + "v_source.type": "MyNode", + "v_target": "135375", + "v_target.type": "MyNode", + "e_type_set": "MyEdge" + } + } + ], + "topological_link_prediction/tg_same_community": [ + { + "query_name": "tg_same_community", + "parameters": { + "v_source": "117001", + "v_source.type": "MyNode", + "v_target": "135375", + "v_target.type": "MyNode", + "community_attribute": "community", + "community_attr_type": "STRING" + } + } + ], + "topological_link_prediction/tg_adamic_adar": [ + { + "query_name": "tg_adamic_adar", + "parameters": { + "v_source": "117001", + "v_source.type": "MyNode", + "v_target": "135375", + "v_target.type": "MyNode", + "e_type_set": "MyEdge" + } + } + ], + "topological_link_prediction/tg_total_neighbors": [ + { + "query_name": "tg_total_neighbors", + "parameters": { + "v_source": "117001", + 
"v_source.type": "MyNode", + "v_target": "135375", + "v_target.type": "MyNode", + "e_type_set": "MyEdge" + } + } + ], + "classification/tg_greedy_graph_coloring": [ + { + "query_name": "tg_greedy_graph_coloring", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "max_colors": 999999, + "print_color_count": true, + "print_stats": false, + "file_path": "/home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/result.csv" + } + } + ], + "classification/tg_maximal_indep_set": [ + { + "query_name": "tg_maximal_indep_set", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "maximum_iteration": 100, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/classification/tg_maximal_indep_set/result.csv" + } + } + ], + "classification/tg_knn_cosine_ss": [ + { + "query_name": "tg_knn_cosine_ss", + "parameters": { + "source": "117001", + "source.type": "MyNode", + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "reverse_e_type_set": "rev_MyEdge", + "weight_attribute": "weight", + "label": "community", + "top_k": 10, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/classification/tg_knn_cosine_ss/result.csv" + } + } + ], + "classification/tg_knn_cosine_cv": [ + { + "query_name": "tg_knn_cosine_cv", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "reverse_e_type_set": "rev_MyEdge", + "weight_attribute": "weight", + "label": "community", + "min_k": 10, + "max_k": 20 + } + } + ], + "community/tg_slpa": [ + { + "query_name": "tg_slpa", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "threshold": 0.8, + "maximum_iteration": 10, + "print_limit": 10, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/community/tg_slpa/result.csv" + } + } + ], + "community/tg_label_prop": [ + { + "query_name": "tg_label_prop", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "maximum_iteration": 10, + 
"print_limit": 10, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/community/tg_label_prop/result.csv" + } + } + ], + "community/tg_tri_count_fast": [ + { + "query_name": "tg_tri_count_fast", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge" + } + } + ], + "community/tg_tri_count": [ + { + "query_name": "tg_tri_count", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge" + } + } + ], + "community/tg_scc": [ + { + "query_name": "tg_scc", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "reverse_e_type_set": "rev_MyEdge", + "top_k_dist": 10, + "print_limit": 10, + "maximum_iteration": 500, + "iter_wcc": 5, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/community/tg_scc/result.csv" + } + } + ], + "community/tg_scc_small_world": [ + { + "query_name": "tg_scc_small_world", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "reverse_e_type": "rev_MyEdge", + "threshold": 100000, + "print_results": false + } + } + ], + "community/tg_wcc": [ + { + "query_name": "tg_wcc", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "print_limit": 10, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/community/tg_wcc/result.csv" + } + } + ], + "community/tg_wcc_small_world": [ + { + "query_name": "tg_wcc_small_world", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "threshold": 100000, + "print_results": false + } + } + ], + "community/tg_kcore": [ + { + "query_name": "tg_kcore", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "k_min": 0, + "k_max": -1, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/community/tg_kcore/result.csv", + "print_all_k": false, + "show_shells": false + } + } + ], + "community/tg_map_equation": [ + { + "query_name": "tg_map_equation", + "parameters": { + "v_type": "MyNode", + "e_type": 
"MyEdge", + "result_attribute": "", + "weight_attribute": "" + } + } + ], + "community/tg_lcc": [ + { + "query_name": "tg_lcc", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "top_k": 10, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/community/tg_lcc/result.csv", + "display_edges": false + } + } + ], + "community/tg_louvain": [ + { + "query_name": "tg_louvain", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "weight_attribute": "weight", + "maximum_iteration": 10, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/community/tg_lcc/result.csv", + "print_stats": false + } + } + ], + "path/tg_cycle_component": [ + { + "query_name": "tg_cycle_component", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_cycle_component/result.csv" + } + } + ], + "path/tg_shortest_ss_no_wt": [ + { + "query_name": "tg_shortest_ss_no_wt", + "parameters": { + "source": "117001", + "source.type": "MyNode", + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "print_limit": -1, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/result.csv", + "display_edges": false + } + } + ], + "path/tg_shortest_ss_any_wt": [ + { + "query_name": "tg_shortest_ss_any_wt", + "parameters": { + "source": "117001", + "source.type": "MyNode", + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "weight_attribute": "weight", + "weight_type": "DOUBLE", + "print_limit": -1, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/result.csv", + "display_edges": false + } + } + ], + "path/tg_shortest_ss_pos_wt": [ + { + "query_name": "tg_shortest_ss_pos_wt", + "parameters": { + "source": "117001", + "source.type": "MyNode", + "v_type_set": "MyNode", + 
"e_type_set": "MyEdge", + "weight_attribute": "weight", + "weight_type": "DOUBLE", + "epsilon": 0.001, + "print_results": false, + "print_limit": -1, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/result.csv", + "display_edges": false + } + } + ], + "path/tg_shortest_ss_pos_wt_tb": [ + { + "query_name": "tg_shortest_ss_pos_wt_tb", + "parameters": { + "source": "117001", + "source.type": "MyNode", + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "weight_attribute": "weight", + "weight_type": "DOUBLE", + "epsilon": 0.001, + "print_results": false, + "print_limit": -1, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/result.csv", + "display_edges": false, + "write_size": 10000 + } + } + ], + "path/tg_all_path": [ + { + "query_name": "tg_all_path", + "parameters": { + "v_source": "117001", + "v_source.type": "MyNode", + "target_v": "135375", + "target_v.type": "MyNode", + "depth": 10, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/path/tg_all_path/result.csv" + } + } + ], + "path/tg_msf": [ + { + "query_name": "tg_msf", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "weight_attribute": "weight", + "weight_type": "DOUBLE", + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_msf/result.csv" + } + } + ], + "path/tg_mst": [ + { + "query_name": "tg_mst", + "parameters": { + "opt_source": "117001", + "opt_source.type": "MyNode", + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "weight_attribute": "weigth", + "weight_type": "DOUBLE", + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_mst/result.csv" + } + } + ], + "path/tg_bfs": [ + { + "query_name": "tg_bfs", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "max_hops": 10, + "v_start": "117001", + "v_start.type": "MyNode", + "print_results": 
false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/path/tg_bfs/result.csv", + "display_edges": true + } + } + ], + "centrality/tg_closeness_cent_approx": [ + { + "query_name": "tg_closeness_cent_approx", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "reverse_e_type": "rev_MyEdge", + "top_k": 10, + "k": 10, + "max_hops": 10, + "epsilon": 0.1, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/result.csv", + "debug": 0, + "sample_index": 0, + "max_size": 1000, + "wf": true + } + } + ], + "centrality/tg_eigenvector_cent": [ + { + "query_name": "tg_eigenvector_cent", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "maximum_iteration": 10, + "conv_limit": 0.000001, + "top_k": 10, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_eigenvector_cent/result.csv" + } + } + ], + "centrality/tg_influence_maximization_greedy": [ + { + "query_name": "tg_influence_maximization_greedy", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "weight_attribute": "weight", + "top_k": 10, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/result.csv" + } + } + ], + "centrality/tg_influence_maximization_CELF": [ + { + "query_name": "tg_influence_maximization_CELF", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "weight_attribute": "weight", + "top_k": 10, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/result.csv" + } + } + ], + "centrality/tg_degree_cent": [ + { + "query_name": "tg_degree_cent", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "reverse_e_type_set": "rev_MyEdge", + "in_degree": true, + "out_degree": true, + "top_k": 10, + "print_results": false, + "result_attribute": "", + "file_path": 
"/home/tigergraph/data/algos/centrality/tg_degree_cent/result.csv" + } + } + ], + "centrality/tg_weighted_degree_cent": [ + { + "query_name": "tg_weighted_degree_cent", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "reverse_e_type": "rev_MyEdge", + "weight_attribute": "weight", + "in_degree": true, + "out_degree": true, + "top_k": 10, + "print_results": false, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/result.csv" + } + } + ], + "centrality/tg_pagerank": [ + { + "query_name": "tg_pagerank", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "max_change": 0.001, + "maximum_iteration": 25, + "damping": 0.85, + "top_k": 10, + "print_results": true, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_pagerank/result.csv", + "display_edges": false + } + } + ], + "centrality/tg_pagerank_wt": [ + { + "query_name": "tg_pagerank_wt", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "weight_attribute": "weight", + "max_change": 0.001, + "maximum_iteration": 25, + "damping": 0.85, + "top_k": 10, + "print_results": true, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_pagerank_wt/result.csv", + "display_edges": false + } + } + ], + "centrality/tg_pagerank_pers": [ + { + "query_name": "tg_pagerank_pers", + "parameters": { + "source": "117001", + "source.type": "MyNode", + "e_type": "MyEdge", + "max_change": 0.001, + "maximum_iteration": 25, + "damping": 0.85, + "top_k": 10, + "print_results": true, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_pagerank_pers/result.csv" + } + } + ], + "centrality/tg_pagerank_pers_ap_batch": [ + { + "query_name": "tg_pagerank_pers_ap_batch", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "max_change": 0.001, + "maximum_iteration": 25, + "damping": 0.85, + "top_k": 10, + "batch_num": 10, + "print_results": true, + 
"file_path": "/home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/result.csv" + } + } + ], + "centrality/tg_harmonic_cent": [ + { + "query_name": "tg_harmonic_cent", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "reverse_e_type_set": "rev_MyEdge", + "max_hops": 10, + "top_k": 10, + "wf": true, + "print_results": true, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_harmonic_cent/result.csv", + "display_edges": false + } + } + ], + "centrality/tg_article_rank": [ + { + "query_name": "tg_article_rank", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "max_change": 0.001, + "maximum_iteration": 25, + "damping": 0.85, + "top_k": 10, + "print_results": true, + "result_attribute": "", + "file_path": "/home/tigergraph/data/algos/centrality/tg_article_rank/result.csv" + } + } + ] + } +} diff --git a/algorithms_test/config/3_run_one.json b/algorithms_test/config/3_run_one.json new file mode 100644 index 00000000..16146553 --- /dev/null +++ b/algorithms_test/config/3_run_one.json @@ -0,0 +1,34 @@ +{ + "general_settings": { + "default_graph_name": "MyGraph", + "default_timeout_in_minutes": "15", + "default_output_directory": "~/data/algos", + "summary_file_path": "~/data/algos/summary.csv" + }, + "algorithms": { + "community/tg_label_prop": [ + { + "query_name": "tg_label_prop", + "parameters": { + "v_type_set": "MyNode", + "e_type_set": "MyEdge", + "maximum_iteration": 10, + "print_limit": 10, + "print_results": false, + "file_path": "/home/tigergraph/data/algos/community/tg_label_prop/result.csv" + } + } + ], + "community/tg_wcc_small_world": [ + { + "query_name": "tg_wcc_small_world", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "threshold": 100000, + "print_results": false + } + } + ] + } +} diff --git a/algorithms_test/gsql/MyGraph/1_create_schema.gsql b/algorithms_test/gsql/MyGraph/1_create_schema.gsql new file mode 100644 index 00000000..8007851d --- /dev/null 
+++ b/algorithms_test/gsql/MyGraph/1_create_schema.gsql @@ -0,0 +1,17 @@ +# 1. Create graph +CREATE GRAPH MyGraph () + +# 2. Create schema_change job +CREATE SCHEMA_CHANGE JOB change_schema_of_MyGraph FOR GRAPH MyGraph { + # 2.1 Create vertices + ADD VERTEX MyNode (PRIMARY_ID id STRING, community STRING) WITH PRIMARY_ID_AS_ATTRIBUTE="true"; + + # 2.2 Create edges + ADD DIRECTED EDGE MyEdge (FROM MyNode, TO MyNode, weight DOUBLE) WITH REVERSE_EDGE="rev_MyEdge"; +} + +# 3. Run schema_change job +RUN SCHEMA_CHANGE JOB change_schema_of_MyGraph + +# 4. Drop schema_change job +DROP JOB change_schema_of_MyGraph diff --git a/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql b/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql new file mode 100644 index 00000000..75081e25 --- /dev/null +++ b/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql @@ -0,0 +1,13 @@ +# 1. Use graph +USE GRAPH MyGraph + +# 2. Create loading job +CREATE LOADING JOB loading_job FOR GRAPH MyGraph { + DEFINE FILENAME f1; + + LOAD f1 TO VERTEX MyNode VALUES($0, _) USING SEPARATOR=" ", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO VERTEX MyNode VALUES($1, _) USING SEPARATOR=" ", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + + LOAD f1 TO EDGE MyEdge VALUES($0, $1, $0) USING SEPARATOR=" ", HEADER="true", EOL="\n", QUOTE="DOUBLE"; +} + diff --git a/algorithms_test/gsql/MyGraph2/1_create_schema.gsql b/algorithms_test/gsql/MyGraph2/1_create_schema.gsql new file mode 100644 index 00000000..cbcfb679 --- /dev/null +++ b/algorithms_test/gsql/MyGraph2/1_create_schema.gsql @@ -0,0 +1,17 @@ +# 1. Create graph +CREATE GRAPH MyGraph2 () + +# 2. Create schema_change job +CREATE SCHEMA_CHANGE JOB change_schema_of_MyGraph FOR GRAPH MyGraph2 { + # 2.1 Create vertices + ADD VERTEX MyNode (PRIMARY_ID id STRING, community STRING) WITH PRIMARY_ID_AS_ATTRIBUTE="true"; + + # 2.2 Create edges + ADD DIRECTED EDGE MyEdge (FROM MyNode, TO MyNode, weight DOUBLE) WITH REVERSE_EDGE="rev_MyEdge"; +} + +# 3. 
Run schema_change job +RUN SCHEMA_CHANGE JOB change_schema_of_MyGraph + +# 4. Drop schema_change job +DROP JOB change_schema_of_MyGraph diff --git a/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql b/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql new file mode 100644 index 00000000..ea2e31f4 --- /dev/null +++ b/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql @@ -0,0 +1,13 @@ +# 1. Use graph +USE GRAPH MyGraph2 + +# 2. Create loading job +CREATE LOADING JOB loading_job FOR GRAPH MyGraph2 { + DEFINE FILENAME f1; + + LOAD f1 TO VERTEX MyNode VALUES($0, _) USING SEPARATOR="\t", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO VERTEX MyNode VALUES($1, _) USING SEPARATOR="\t", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + + LOAD f1 TO EDGE MyEdge VALUES($0, $1, $0) USING SEPARATOR="\t", HEADER="true", EOL="\n", QUOTE="DOUBLE"; +} + diff --git a/algorithms_test/mem/1_start.sh b/algorithms_test/mem/1_start.sh new file mode 100755 index 00000000..50f6d173 --- /dev/null +++ b/algorithms_test/mem/1_start.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +rm ~/mem/*.log +mkdir -p ~/mem + +sleep 1; + +mem_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" + +sh ${mem_dir}/run_free.sh >> ~/mem/mem.log 2>&1 & diff --git a/algorithms_test/mem/2_peak.sh b/algorithms_test/mem/2_peak.sh new file mode 100755 index 00000000..dbefafa2 --- /dev/null +++ b/algorithms_test/mem/2_peak.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +grep 'Mem:' ~/mem/mem.log > ~/mem/prof.log; + +mem_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +awk -f ${mem_dir}/peak.awk ~/mem/prof.log >> ~/mem/peak.log; + +tail -n 1 ~/mem/peak.log diff --git a/algorithms_test/mem/3_reset.sh b/algorithms_test/mem/3_reset.sh new file mode 100755 index 00000000..86b91576 --- /dev/null +++ b/algorithms_test/mem/3_reset.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# Set up initial variables +check_count=0 +max_checks=100 +mem_diff_threshold=0.01 +mem_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> 
/dev/null && pwd )" + +# Function to get memory stats and check the difference +check_memory() { + # Run the memory check commands + grep 'Mem:' ~/mem/mem.log > ~/mem/prof.log + awk -f ${mem_dir}/peak.awk ~/mem/prof.log >> ~/mem/peak.log + + # Get the latest memory stats + latest_stats=$(tail -n 1 ~/mem/peak.log) + + # Extract the max - min memory difference + mem_diff=$(echo "$latest_stats" | awk -F';' '{print $4}' | awk -F' ' '{print $6}') + + # Convert mem_diff to a number + mem_diff=$(echo "$mem_diff" | sed 's/GB//') + + # Return the memory difference + echo "$mem_diff" +} + +# Main loop +while [ $check_count -lt $max_checks ]; do + check_count=$((check_count + 1)) + + # Check memory and get the difference + mem_diff=$(check_memory) + + # Check if the difference is less than the threshold + if (( $(echo "$mem_diff < $mem_diff_threshold" | bc -l) )); then + break + fi + + # Run the reset script and sleep for 1 second + > ~/mem/mem.log + sleep 1 +done + diff --git a/algorithms_test/mem/4_stop.sh b/algorithms_test/mem/4_stop.sh new file mode 100755 index 00000000..c1806840 --- /dev/null +++ b/algorithms_test/mem/4_stop.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +ps -ef | grep "./run_free.sh" | grep -v grep | awk '{print $2}' | xargs kill; diff --git a/algorithms_test/mem/peak.awk b/algorithms_test/mem/peak.awk new file mode 100644 index 00000000..93db5eab --- /dev/null +++ b/algorithms_test/mem/peak.awk @@ -0,0 +1,25 @@ +# initialization for line 1 +NR == 1 { + max_mem = $3; + min_mem = $3; +} +# memory +NR > 1 && $3 > max_mem { + max_mem = $3; +} +NR > 1 && $3 < min_mem { + min_mem = $3; +} +# sum +{ + sum_mem += $3; + row_num ++; +} +# print results after processing the file +END { + if(row_num > 0) avg_mem = sum_mem / row_num / 1024 / 1024; + max_mem = max_mem / 1024 / 1024; + min_mem = min_mem / 1024 / 1024; + diff_mem = max_mem - min_mem; + printf "row num: %0.0f. 
max mem: %0.2f GB; min mem: %0.2f GB; avg mem: %0.2f GB; max mem - min mem: %0.2f GB.\n", row_num, max_mem, min_mem, avg_mem, diff_mem; +} diff --git a/algorithms_test/mem/run_free.sh b/algorithms_test/mem/run_free.sh new file mode 100755 index 00000000..f51aad3a --- /dev/null +++ b/algorithms_test/mem/run_free.sh @@ -0,0 +1,8 @@ +#!/usr/bin/bash +# monitor the memory for one week +for i in $(seq 1 604800) +do + date + free | grep Mem + sleep 1 +done diff --git a/algorithms_test/tools/search_for_gsql_files.sh b/algorithms_test/tools/search_for_gsql_files.sh new file mode 100755 index 00000000..32fc77db --- /dev/null +++ b/algorithms_test/tools/search_for_gsql_files.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Directory to search for .gsql files +search_dir="../../algorithms" + +# Find all .gsql files under the search directory recursively +gsql_files=$(find "$search_dir" -type f -name "*.gsql") + +# Check if any .gsql files were found +if [ -z "$gsql_files" ]; then + echo "No .gsql files found in $search_dir" +else + echo "Found the following .gsql files:" + echo "$gsql_files" +fi + From aabbd3c06f58fcc8283d19fb945fe9e167f19538 Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Wed, 10 Jul 2024 05:41:51 +0000 Subject: [PATCH 2/9] [ALGOS-263] feat(algos): increase sleep time; modify the parameters for tg_mst; --- algorithms_test/config/3_run.json | 1 + algorithms_test/mem/3_reset.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/algorithms_test/config/3_run.json b/algorithms_test/config/3_run.json index ed3cc5f8..99d551a2 100644 --- a/algorithms_test/config/3_run.json +++ b/algorithms_test/config/3_run.json @@ -400,6 +400,7 @@ "e_type_set": "MyEdge", "weight_attribute": "weight", "weight_type": "DOUBLE", + "maximum_iteration": 100, "print_results": false, "result_attribute": "", "file_path": "/home/tigergraph/data/algos/path/tg_mst/result.csv" diff --git a/algorithms_test/mem/3_reset.sh b/algorithms_test/mem/3_reset.sh index 86b91576..e745f3fb 100755 
--- a/algorithms_test/mem/3_reset.sh +++ b/algorithms_test/mem/3_reset.sh @@ -39,6 +39,6 @@ while [ $check_count -lt $max_checks ]; do # Run the reset script and sleep for 1 second > ~/mem/mem.log - sleep 1 + sleep 2 done From 2ba501b461c408cf0f84fc766229a8c29ef746d9 Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Wed, 10 Jul 2024 05:42:31 +0000 Subject: [PATCH 3/9] [ALGOS-263] feat(algos): convert some of the algorithms from singlde mode to distributed mode; --- algorithms/Centrality/article_rank/tg_article_rank.gsql | 2 +- .../closeness/approximate/tg_closeness_cent_approx.gsql | 2 +- algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql | 2 +- .../Centrality/degree/weighted/tg_weighted_degree_cent.gsql | 2 +- algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql | 2 +- algorithms/Centrality/harmonic/tg_harmonic_cent.gsql | 2 +- .../CELF/tg_influence_maximization_CELF.gsql | 4 ++-- .../greedy/tg_influence_maximization_greedy.gsql | 2 +- .../Centrality/pagerank/global/unweighted/tg_pagerank.gsql | 2 +- .../Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql | 2 +- .../personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql | 2 +- .../pagerank/personalized/multi_source/tg_pagerank_pers.gsql | 2 +- .../greedy_graph_coloring/tg_greedy_graph_coloring.gsql | 2 +- .../k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql | 2 +- .../deterministic/tg_maximal_indep_set.gsql | 2 +- .../strongly_connected_components/standard/tg_scc.gsql | 2 +- .../weakly_connected_components/standard/tg_wcc.gsql | 2 +- algorithms/Community/k_core/tg_kcore.gsql | 2 +- algorithms/Community/label_propagation/tg_label_prop.gsql | 2 +- algorithms/Community/local_clustering_coefficient/tg_lcc.gsql | 2 +- algorithms/Community/louvain/tg_louvain.gsql | 2 +- .../Community/speaker-listener_label_propagation/tg_slpa.gsql | 2 +- algorithms/Path/bfs/tg_bfs.gsql | 2 +- algorithms/Path/cycle_component/tg_cycle_component.gsql | 2 +- algorithms/Path/minimum_spanning_forest/tg_msf.gsql | 2 
+- algorithms/Path/minimum_spanning_tree/tg_mst.gsql | 2 +- .../path_between_two_vertices/one_direction/tg_all_path.gsql | 2 +- .../Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql | 2 +- .../weighted/any_sign/tg_shortest_ss_any_wt.gsql | 2 +- .../weighted/positive/summary/tg_shortest_ss_pos_wt.gsql | 2 +- .../weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql | 2 +- 31 files changed, 32 insertions(+), 32 deletions(-) diff --git a/algorithms/Centrality/article_rank/tg_article_rank.gsql b/algorithms/Centrality/article_rank/tg_article_rank.gsql index f16c00c9..85a83bf6 100644 --- a/algorithms/Centrality/article_rank/tg_article_rank.gsql +++ b/algorithms/Centrality/article_rank/tg_article_rank.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_article_rank (STRING v_type, STRING e_type, +CREATE DISTRIBUTED QUERY tg_article_rank (STRING v_type, STRING e_type, FLOAT max_change = 0.001, INT maximum_iteration = 25, FLOAT damping = 0.85, INT top_k = 100, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX V1 { diff --git a/algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql b/algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql index 1e2fef63..52dee2df 100644 --- a/algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql +++ b/algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_closeness_cent_approx ( +CREATE DISTRIBUTED QUERY tg_closeness_cent_approx ( SET v_type_set, SET e_type_set, STRING reverse_e_type, INT top_k=100, INT k = 100, INT max_hops = 10, DOUBLE epsilon = 0.1, BOOL print_results = true, STRING file_path = "", INT debug = 0, INT sample_index = 0, INT max_size = 1000, BOOL wf = True ) SYNTAX V1 { diff --git a/algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql b/algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql index e3efb225..e2cf2559 100644 --- 
a/algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql +++ b/algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_degree_cent(SET v_type_set, SET e_type_set, SET reverse_e_type_set, BOOL in_degree = TRUE, BOOL out_degree = TRUE, +CREATE DISTRIBUTED QUERY tg_degree_cent(SET v_type_set, SET e_type_set, SET reverse_e_type_set, BOOL in_degree = TRUE, BOOL out_degree = TRUE, INT top_k=100, BOOL print_results = TRUE, STRING result_attribute = "",STRING file_path = "") SYNTAX V1 { /* diff --git a/algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql b/algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql index a284380b..c7593574 100644 --- a/algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql +++ b/algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_weighted_degree_cent(STRING v_type, STRING e_type, STRING reverse_e_type, string weight_attribute, BOOL in_degree = TRUE, BOOL out_degree = TRUE, INT top_k=100, BOOL print_results = TRUE, STRING result_attribute = "",STRING file_path = "") SYNTAX V1 { +CREATE DISTRIBUTED QUERY tg_weighted_degree_cent(STRING v_type, STRING e_type, STRING reverse_e_type, string weight_attribute, BOOL in_degree = TRUE, BOOL out_degree = TRUE, INT top_k=100, BOOL print_results = TRUE, STRING result_attribute = "",STRING file_path = "") SYNTAX V1 { /* First Author: diff --git a/algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql b/algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql index e3af4744..e98ca65e 100644 --- a/algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql +++ b/algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_eigenvector_cent(SET v_type_set, SET e_type_set, INT maximum_iteration = 100, FLOAT conv_limit = 0.000001, +CREATE DISTRIBUTED QUERY tg_eigenvector_cent(SET v_type_set, SET e_type_set, INT maximum_iteration = 100, FLOAT conv_limit = 
0.000001, INT top_k = 100, BOOL print_results = True, STRING result_attribute = "",STRING file_path = "" ) SYNTAX V1 { diff --git a/algorithms/Centrality/harmonic/tg_harmonic_cent.gsql b/algorithms/Centrality/harmonic/tg_harmonic_cent.gsql index a981d972..13b70b6a 100644 --- a/algorithms/Centrality/harmonic/tg_harmonic_cent.gsql +++ b/algorithms/Centrality/harmonic/tg_harmonic_cent.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_harmonic_cent(SET v_type_set, SET e_type_set, SET reverse_e_type_set,INT max_hops = 10, +CREATE DISTRIBUTED QUERY tg_harmonic_cent(SET v_type_set, SET e_type_set, SET reverse_e_type_set,INT max_hops = 10, INT top_k = 100, BOOL wf = TRUE, BOOL print_results = True, STRING result_attribute = "", STRING file_path = "", BOOL display_edges = FALSE) SYNTAX V1 { diff --git a/algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql b/algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql index 48440f9b..080b81f3 100644 --- a/algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql +++ b/algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_influence_maximization_CELF(STRING v_type,STRING e_type,STRING weight_attribute,INT top_k, +CREATE DISTRIBUTED QUERY tg_influence_maximization_CELF(STRING v_type,STRING e_type,STRING weight_attribute,INT top_k, BOOL print_results = True, STRING file_path = "") SYNTAX V1 { /* @@ -128,4 +128,4 @@ CREATE QUERY tg_influence_maximization_CELF(STRING v_type,STRING e_type,STRING w IF print_results THEN PRINT @@res_list; END; -} \ No newline at end of file +} diff --git a/algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql b/algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql index 3731a287..5c1f928e 100644 --- a/algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql +++ 
b/algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_influence_maximization_greedy(STRING v_type,STRING e_type,STRING weight_attribute,INT top_k, +CREATE DISTRIBUTED QUERY tg_influence_maximization_greedy(STRING v_type,STRING e_type,STRING weight_attribute,INT top_k, BOOL print_results = True, STRING file_path = "") SYNTAX V1 { /* diff --git a/algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql b/algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql index a00155bd..8f6e93fd 100644 --- a/algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql +++ b/algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_pagerank (STRING v_type, STRING e_type, +CREATE DISTRIBUTED QUERY tg_pagerank (STRING v_type, STRING e_type, FLOAT max_change=0.001, INT maximum_iteration=25, FLOAT damping=0.85, INT top_k = 100, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "", BOOL display_edges = FALSE) SYNTAX V1 { diff --git a/algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql b/algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql index 2d824e1e..7726d8a0 100644 --- a/algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql +++ b/algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_pagerank_wt (STRING v_type, STRING e_type, STRING weight_attribute, +CREATE DISTRIBUTED QUERY tg_pagerank_wt (STRING v_type, STRING e_type, STRING weight_attribute, FLOAT max_change=0.001, INT maximum_iteration=25, FLOAT damping=0.85, INT top_k = 100, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "", BOOL display_edges = FALSE) SYNTAX V1 { diff --git a/algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql b/algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql index 
e770576d..8ddb6ec1 100644 --- a/algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql +++ b/algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_pagerank_pers_ap_batch(STRING v_type, STRING e_type, +CREATE DISTRIBUTED QUERY tg_pagerank_pers_ap_batch(STRING v_type, STRING e_type, FLOAT max_change=0.001, INT maximum_iteration=25, FLOAT damping = 0.85, INT top_k = 100,INT batch_num,BOOL print_results,STRING file_path) SYNTAX V1 { /* diff --git a/algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql b/algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql index cc3934a5..a8f1f873 100644 --- a/algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql +++ b/algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_pagerank_pers(SET source, STRING e_type, +CREATE DISTRIBUTED QUERY tg_pagerank_pers(SET source, STRING e_type, FLOAT max_change=0.001, INT maximum_iteration=25, FLOAT damping = 0.85, INT top_k = 100, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX V1 { diff --git a/algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql b/algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql index d7c4eaaa..365cb02b 100644 --- a/algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql +++ b/algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_greedy_graph_coloring(SET v_type_set,SET e_type_set,UINT max_colors = 999999, +CREATE DISTRIBUTED QUERY tg_greedy_graph_coloring(SET v_type_set,SET e_type_set,UINT max_colors = 999999, BOOL print_color_count = TRUE, BOOL print_stats = TRUE, STRING file_path = "") SYNTAX V1 { /* diff --git 
a/algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql b/algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql index e7ed3eae..f127721a 100644 --- a/algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql +++ b/algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_knn_cosine_ss (VERTEX source, SET v_type_set, SET e_type_set, SET reverse_e_type_set, STRING weight_attribute, +CREATE DISTRIBUTED QUERY tg_knn_cosine_ss (VERTEX source, SET v_type_set, SET e_type_set, SET reverse_e_type_set, STRING weight_attribute, STRING label, INT top_k, BOOL print_results = TRUE, STRING file_path = "", STRING result_attribute = "") RETURNS (STRING) SYNTAX V1 { /* diff --git a/algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql b/algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql index aee9a0f7..5ff61956 100644 --- a/algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql +++ b/algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_maximal_indep_set(STRING v_type, STRING e_type, INT maximum_iteration = 100, BOOL print_results = TRUE, STRING file_path = "") SYNTAX V1 { +CREATE DISTRIBUTED QUERY tg_maximal_indep_set(STRING v_type, STRING e_type, INT maximum_iteration = 100, BOOL print_results = TRUE, STRING file_path = "") SYNTAX V1 { /* First Author: diff --git a/algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql b/algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql index 52559faa..31e22dba 100644 --- a/algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql +++ 
b/algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_scc (SET v_type_set, SET e_type_set, SET reverse_e_type_set, +CREATE DISTRIBUTED QUERY tg_scc (SET v_type_set, SET e_type_set, SET reverse_e_type_set, INT top_k_dist, INT print_limit, INT maximum_iteration = 500, INT iter_wcc = 5, BOOL print_results = TRUE, STRING result_attribute= "", STRING file_path="") SYNTAX V1 { //INT iter_end_trim = 3 diff --git a/algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql b/algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql index 94046cd7..3227a681 100644 --- a/algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql +++ b/algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_wcc (SET v_type_set, SET e_type_set, INT print_limit = 100, +CREATE DISTRIBUTED QUERY tg_wcc (SET v_type_set, SET e_type_set, INT print_limit = 100, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX V1 { /* diff --git a/algorithms/Community/k_core/tg_kcore.gsql b/algorithms/Community/k_core/tg_kcore.gsql index 6dedde91..dc218741 100644 --- a/algorithms/Community/k_core/tg_kcore.gsql +++ b/algorithms/Community/k_core/tg_kcore.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_kcore(STRING v_type, STRING e_type, INT k_min = 0, INT k_max = -1, BOOL print_results = TRUE, +CREATE DISTRIBUTED QUERY tg_kcore(STRING v_type, STRING e_type, INT k_min = 0, INT k_max = -1, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "", BOOL print_all_k = FALSE, BOOL show_shells=FALSE) SYNTAX V1 { /* diff --git a/algorithms/Community/label_propagation/tg_label_prop.gsql b/algorithms/Community/label_propagation/tg_label_prop.gsql index dd410b44..9a6329dd 100644 --- a/algorithms/Community/label_propagation/tg_label_prop.gsql +++ 
b/algorithms/Community/label_propagation/tg_label_prop.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_label_prop (SET v_type_set, SET e_type_set, INT maximum_iteration, INT print_limit, +CREATE DISTRIBUTED QUERY tg_label_prop (SET v_type_set, SET e_type_set, INT maximum_iteration, INT print_limit, BOOL print_results = TRUE, STRING file_path = "", STRING result_attribute = "") SYNTAX V1 { diff --git a/algorithms/Community/local_clustering_coefficient/tg_lcc.gsql b/algorithms/Community/local_clustering_coefficient/tg_lcc.gsql index ce100160..caba83fc 100644 --- a/algorithms/Community/local_clustering_coefficient/tg_lcc.gsql +++ b/algorithms/Community/local_clustering_coefficient/tg_lcc.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_lcc (STRING v_type, STRING e_type,INT top_k=100,BOOL print_results = True, STRING result_attribute = "", +CREATE DISTRIBUTED QUERY tg_lcc (STRING v_type, STRING e_type,INT top_k=100,BOOL print_results = True, STRING result_attribute = "", STRING file_path = "", BOOL display_edges = FALSE) SYNTAX V1 { /* diff --git a/algorithms/Community/louvain/tg_louvain.gsql b/algorithms/Community/louvain/tg_louvain.gsql index c39a12c2..433f5028 100644 --- a/algorithms/Community/louvain/tg_louvain.gsql +++ b/algorithms/Community/louvain/tg_louvain.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_louvain(SET v_type_set, SET e_type_set, STRING weight_attribute = "weight", INT maximum_iteration = 10, +CREATE DISTRIBUTED QUERY tg_louvain(SET v_type_set, SET e_type_set, STRING weight_attribute = "weight", INT maximum_iteration = 10, STRING result_attribute = "cid", STRING file_path = "", BOOL print_stats = FALSE) SYNTAX V1 { /* diff --git a/algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql b/algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql index 2b0d571d..47857626 100644 --- a/algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql +++ b/algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql @@ -1,4 +1,4 @@ -CREATE QUERY 
tg_slpa (SET v_type_set, SET e_type_set, FLOAT threshold, INT maximum_iteration, INT print_limit, +CREATE DISTRIBUTED QUERY tg_slpa (SET v_type_set, SET e_type_set, FLOAT threshold, INT maximum_iteration, INT print_limit, BOOL print_results = TRUE, STRING file_path = "") SYNTAX V1 { /* diff --git a/algorithms/Path/bfs/tg_bfs.gsql b/algorithms/Path/bfs/tg_bfs.gsql index 3c90b2de..b4b65e17 100644 --- a/algorithms/Path/bfs/tg_bfs.gsql +++ b/algorithms/Path/bfs/tg_bfs.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_bfs(SET v_type_set, SET e_type_set,INT max_hops=10, VERTEX v_start, +CREATE DISTRIBUTED QUERY tg_bfs(SET v_type_set, SET e_type_set,INT max_hops=10, VERTEX v_start, BOOL print_results = True, STRING result_attribute = "",STRING file_path = "", BOOL display_edges = TRUE) SYNTAX V1 { /* diff --git a/algorithms/Path/cycle_component/tg_cycle_component.gsql b/algorithms/Path/cycle_component/tg_cycle_component.gsql index ad235cb6..75c3be79 100644 --- a/algorithms/Path/cycle_component/tg_cycle_component.gsql +++ b/algorithms/Path/cycle_component/tg_cycle_component.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_cycle_component(STRING v_type,STRING e_type,BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX v1{ +CREATE DISTRIBUTED QUERY tg_cycle_component(STRING v_type,STRING e_type,BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX v1{ /* diff --git a/algorithms/Path/minimum_spanning_forest/tg_msf.gsql b/algorithms/Path/minimum_spanning_forest/tg_msf.gsql index 2263fe72..e93d03b9 100644 --- a/algorithms/Path/minimum_spanning_forest/tg_msf.gsql +++ b/algorithms/Path/minimum_spanning_forest/tg_msf.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_msf (SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, +CREATE DISTRIBUTED QUERY tg_msf (SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX V1 { /* 
diff --git a/algorithms/Path/minimum_spanning_tree/tg_mst.gsql b/algorithms/Path/minimum_spanning_tree/tg_mst.gsql index 4eb05609..78922fae 100644 --- a/algorithms/Path/minimum_spanning_tree/tg_mst.gsql +++ b/algorithms/Path/minimum_spanning_tree/tg_mst.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_mst(VERTEX opt_source, SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, +CREATE DISTRIBUTED QUERY tg_mst(VERTEX opt_source, SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, INT maximum_iteration = -1, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "") SYNTAX V1 { /* diff --git a/algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql b/algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql index 59b33728..e5202455 100644 --- a/algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql +++ b/algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_all_path(VERTEX v_source, VERTEX target_v, INT depth = 10, +CREATE DISTRIBUTED QUERY tg_all_path(VERTEX v_source, VERTEX target_v, INT depth = 10, BOOL print_results = TRUE, STRING file_path = "")SYNTAX v1 { /* diff --git a/algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql b/algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql index f8903e51..a4f8d04c 100644 --- a/algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql +++ b/algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_shortest_ss_no_wt (VERTEX source, SET v_type_set, SET e_type_set, +CREATE DISTRIBUTED QUERY tg_shortest_ss_no_wt (VERTEX source, SET v_type_set, SET e_type_set, INT print_limit = -1, BOOL print_results =TRUE, STRING result_attribute ="", STRING file_path ="", BOOL display_edges =FALSE) SYNTAX V1 { diff --git a/algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql 
b/algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql index a27f93d6..f430325f 100644 --- a/algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql +++ b/algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_shortest_ss_any_wt (VERTEX source, SET v_type_set, SET e_type_set, +CREATE DISTRIBUTED QUERY tg_shortest_ss_any_wt (VERTEX source, SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, INT print_limit = -1, BOOL print_results = TRUE, STRING result_attribute = "", STRING file_path = "", BOOL display_edges = FALSE) SYNTAX V1 { diff --git a/algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql b/algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql index 665502da..db30f2d6 100644 --- a/algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql +++ b/algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_shortest_ss_pos_wt (VERTEX source, SET v_type_set, SET e_type_set, +CREATE DISTRIBUTED QUERY tg_shortest_ss_pos_wt (VERTEX source, SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, FLOAT epsilon = 0.001,BOOL print_results = TRUE, INT print_limit = -1, BOOL display_edges = FALSE, STRING result_attribute = "", STRING file_path = "") SYNTAX V1 { diff --git a/algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql b/algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql index a89d48cb..5079d063 100644 --- a/algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql +++ b/algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql @@ -1,4 +1,4 @@ -CREATE QUERY tg_shortest_ss_pos_wt_tb (VERTEX source, SET v_type_set, SET e_type_set, +CREATE DISTRIBUTED QUERY 
tg_shortest_ss_pos_wt_tb (VERTEX source, SET v_type_set, SET e_type_set, STRING weight_attribute, STRING weight_type, FLOAT epsilon = 0.001,BOOL print_results = TRUE, INT print_limit = -1, BOOL display_edges = FALSE, STRING result_attribute = "", STRING file_path = "", UINT write_size = 10000) SYNTAX V1 { From b9209a643c5fec71309dfc59f236d72c1e1642fa Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Wed, 10 Jul 2024 06:24:04 +0000 Subject: [PATCH 4/9] [ALGOS-263] feat(algos): Remove ReadMe.md; --- algorithms_test/ReadMe.md | 733 -------------------------------------- 1 file changed, 733 deletions(-) delete mode 100644 algorithms_test/ReadMe.md diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md deleted file mode 100644 index 5dfa3f9f..00000000 --- a/algorithms_test/ReadMe.md +++ /dev/null @@ -1,733 +0,0 @@ -# Setup -``` -algorithms_test (ALGOS-263) $ ./2_setup.sh -======================================== MyGraph ======================================== -Dropping the graph MyGraph... -Successfully dropped jobs on the graph 'MyGraph': [loading_job]. -All jobs on the graph 'MyGraph' are dropped. 
-Successfully dropped queries on the graph 'MyGraph': [tg_wcc, tg_shortest_ss_any_wt, tg_pagerank_pers_ap_batch, tg_tri_count_fast, tg_louvain, tg_shortest_ss_no_wt, tg_mst, tg_scc_small_world, tg_scc, tg_embedding_pairwise_cosine_similarity, tg_all_path, tg_tri_count, tg_all_path_bidirection, tg_knn_cosine_all, tg_fpm_pre, tg_common_neighbors, tg_maximal_indep_set, tg_eigenvector_cent, tg_pagerank_pers, tg_embedding_cosine_similarity, tg_pagerank_wt, tg_harmonic_cent, tg_bfs, tg_closeness_cent_approx, tg_astar, tg_degree_cent, tg_maximal_indep_set_random, tg_wcc_small_world, tg_greedy_graph_coloring, tg_betweenness_cent, tg_resource_allocation, tg_fastRP, tg_jaccard_nbor_ap_batch, tg_knn_cosine_cv_sub, tg_kmeans_sub, tg_influence_maximization_greedy, tg_max_BFS_depth, tg_lcc, tg_closeness_cent, tg_slpa, tg_adamic_adar, tg_cosine_nbor_ss, tg_cosine_nbor_ap_batch, tg_total_neighbors, tg_knn_cosine_all_sub, tg_same_community, tg_pagerank, tg_fpm, tg_kmeans, tg_map_equation, tg_estimate_diameter, tg_cycle_detection, tg_cycle_detection_batch, tg_knn_cosine_cv, tg_kcore, tg_article_rank, tg_weisfeiler_lehman, tg_jaccard_nbor_ss, tg_maxflow, tg_cycle_component, tg_weighted_degree_cent, tg_shortest_ss_pos_wt, tg_label_prop, tg_knn_cosine_ss, tg_shortest_ss_pos_wt_tb, tg_cycle_detection_count, tg_preferential_attachment, tg_msf, tg_influence_maximization_CELF, tmp1]. -All queries on the graph 'MyGraph' are dropped. -The graph MyGraph is dropped. -Finished dropping graph MyGraph. --------------------------------------------------------------------------------- -Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/1_create_schema.gsql -The graph MyGraph is created. -Successfully created schema change jobs: [change_schema_of_MyGraph]. -WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. 
To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. -Kick off schema change job change_schema_of_MyGraph -Doing schema change on graph 'MyGraph' (current version: 0) -Trying to add local vertex 'MyNode' to the graph 'MyGraph'. -Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph'. - -Graph MyGraph updated to new version 1 -The job change_schema_of_MyGraph completes in 0.900 seconds! -Local schema change succeeded. -Successfully dropped jobs on the graph 'MyGraph': [change_schema_of_MyGraph]. --------------------------------------------------------------------------------- -Running: Creating loading job /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql -Using graph 'MyGraph' -Successfully created loading jobs: [loading_job]. --------------------------------------------------------------------------------- -Running loading job for /home/tigergraph/data/public/zhishi-all/out.zhishi-all... -[Tip: Use "CTRL + C" to stop displaying the loading status update, then use "SHOW LOADING STATUS " to track the loading progress again] -[Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] -Running the following loading job: - Job name: loading_job - Jobid: MyGraph.loading_job.file.m1.1720513082456 - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1720513082456 -Job "MyGraph.loading_job.file.m1.1720513082456" loading status -Current timestamp is 2024-07-09 08:19:08.627 -Loading status was last updated at 2024-07-09 08:19:07.896. 
-[FINISHED] m1 ( Finished: 1 / Total: 1 ) - +-----------------------------------------------------------------------------------------------------+ - | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| - |zhishi-all/out.zhishi-all | 65905159 | 197715477 | 0 | 1014 kl/s | 64.97 s | 100 %| - +-----------------------------------------------------------------------------------------------------+ -LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1720513082456 - Job ID: MyGraph.loading_job.file.m1.1720513082456-----------------------------------------------------+ - Elapsed time: 65 sec - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1720513082456 - Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1720513082456/summary - -Finished running loading job for /home/tigergraph/data/public/zhishi-all/out.zhishi-all. --------------------------------------------------------------------------------- -All queries are dropped. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/jaccard/all_pairs/tg_jaccard_nbor_ap_batch.gsql -Successfully created queries: [tg_jaccard_nbor_ap_batch]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/jaccard/single_source/tg_jaccard_nbor_ss.gsql -Successfully created queries: [tg_jaccard_nbor_ss]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/cosine/all_pairs/tg_cosine_nbor_ap_batch.gsql -Warning in query tg_cosine_nbor_ap_batch (WARN-5): line 80, col 39 -The comparison 'divisor==0' may lead to unexpected behavior because it involves -equality test between float/double numeric values. We suggest to do such -comparison with an error margin, e.g. 'abs((divisor) - (0)) < epsilon', where -epsilon is a very small positive value of your choice, such as 0.0001. 
-Successfully created queries: [tg_cosine_nbor_ap_batch]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Similarity/cosine/single_source/tg_cosine_nbor_ss.gsql -Successfully created queries: [tg_cosine_nbor_ss]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/preferential_attachment/tg_preferential_attachment.gsql -Successfully created queries: [tg_preferential_attachment]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/resource_allocation/tg_resource_allocation.gsql -Successfully created queries: [tg_resource_allocation]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/same_community/tg_same_community.gsql -Successfully created queries: [tg_same_community]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/adamic_adar/tg_adamic_adar.gsql -Successfully created queries: [tg_adamic_adar]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/total_neighbors/tg_total_neighbors.gsql -Successfully created queries: [tg_total_neighbors]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Topological Link Prediction/common_neighbors/tg_common_neighbors.gsql -Successfully created queries: [tg_common_neighbors]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/greedy_graph_coloring/tg_greedy_graph_coloring.gsql -Successfully created queries: [tg_greedy_graph_coloring]. 
-Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/maximal_independent_set/deterministic/tg_maximal_indep_set.gsql -Successfully created queries: [tg_maximal_indep_set]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/maximal_independent_set/random/tg_maximal_indep_set_random.gsql -Successfully created queries: [tg_maximal_indep_set_random]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/all_pairs/tg_knn_cosine_all.gsql - -Semantic Check Error in query tg_knn_cosine_all (SEM-45): line 56, col 49 -The tuple name or the function tg_knn_cosine_all_sub is not defined. -Saved as draft query with type/semantic error: [tg_knn_cosine_all]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/all_pairs/tg_knn_cosine_all_sub.gsql -Successfully created queries: [tg_knn_cosine_all_sub]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/single_source/tg_knn_cosine_ss.gsql -Successfully created queries: [tg_knn_cosine_ss]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/cross_validation/tg_knn_cosine_cv_sub.gsql -Successfully created queries: [tg_knn_cosine_cv_sub]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Classification/k_nearest_neighbors/cross_validation/tg_knn_cosine_cv.gsql -Successfully created queries: [tg_knn_cosine_cv]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/EmbeddingSimilarity/pairwise/tg_embedding_pairwise_cosine_sim.gsql -Successfully created queries: [tg_embedding_pairwise_cosine_similarity]. 
-Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/EmbeddingSimilarity/single_source/tg_embedding_cosine_sim.gsql -Successfully created queries: [tg_embedding_cosine_similarity]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/weisfeiler_lehman/tg_weisfeiler_lehman.gsql -Successfully created queries: [tg_weisfeiler_lehman]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/GraphML/Embeddings/FastRP/tg_fastRP.gsql -Warning in query tg_fastRP (WARN-5): line 209, col 12 -The comparison 'mr<=p1' may lead to unexpected behavior because it involves -equality test between float/double numeric values. We suggest to do such -comparison with an error margin, e.g. 'mr<=p1 + epsilon', where epsilon is a -very small positive value of your choice, such as 0.0001. -Warning in query tg_fastRP (WARN-5): line 211, col 17 -The comparison 'mr<=p1+p2' may lead to unexpected behavior because it involves -equality test between float/double numeric values. We suggest to do such -comparison with an error margin, e.g. 'mr<=p1+p2 + epsilon', where epsilon is a -very small positive value of your choice, such as 0.0001. -Warning in query tg_fastRP (WARN-5): line 238, col 13 -The comparison 'square_sum==0.0' may lead to unexpected behavior because it -involves equality test between float/double numeric values. We suggest to do -such comparison with an error margin, e.g. 'abs((square_sum) - (0.0)) < -epsilon', where epsilon is a very small positive value of your choice, such as -0.0001. -Successfully created queries: [tg_fastRP]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/speaker-listener_label_propagation/tg_slpa.gsql -Successfully created queries: [tg_slpa]. 
-Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/label_propagation/tg_label_prop.gsql -Successfully created queries: [tg_label_prop]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/k_means/tg_kmeans_sub.gsql - -Type Check Error in query tg_kmeans_sub (TYP-158): line 43, col 33 -'t.embeddings' indicates no valid vertex type. -Possible reasons: - -- The expression refers to a primary_id, which is not directly -usable in the query body. To use primary_id, declare it as an -attribute. E.g "CREATE VERTEX Person (PRIMARY_ID ssn string, ssn string, age -int)" -- The expression has misspelled an attribute, or a vertex name - -Saved as draft query with type/semantic error: [tg_kmeans_sub]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/k_means/tg_kmeans.gsql - -Type Check Error in query tg_kmeans (TYP-5401): line 50, col 29 -Query 'tg_kmeans_sub' cannot be used as an expression, because it does not have -a return type. -Saved as draft query with type/semantic error: [tg_kmeans]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/triangle_counting/fast/tg_tri_count_fast.gsql -Successfully created queries: [tg_tri_count_fast]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/triangle_counting/standard/tg_tri_count.gsql -Successfully created queries: [tg_tri_count]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/strongly_connected_components/small_world/tg_scc_small_world.gsql -Successfully created queries: [tg_scc_small_world]. 
-Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/strongly_connected_components/standard/tg_scc.gsql -Successfully created queries: [tg_scc]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/weakly_connected_components/small_world/tg_wcc_small_world.gsql -Successfully created queries: [tg_wcc_small_world]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/weakly_connected_components/standard/tg_wcc.gsql -Successfully created queries: [tg_wcc]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/k_core/tg_kcore.gsql -Successfully created queries: [tg_kcore]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/map_equation/tg_map_equation.gsql -Successfully created queries: [tg_map_equation]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/local_clustering_coefficient/tg_lcc.gsql -Successfully created queries: [tg_lcc]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/louvain/tg_louvain.gsql -Warning in query tg_louvain (WARN-5): line 97, col 25 -The comparison '-t.@max_best_move.weight==t.@sum_cc_weight' may lead to -unexpected behavior because it involves equality test between float/double -numeric values. We suggest to do such comparison with an error margin, e.g. -'abs((-t.@max_best_move.weight) - (t.@sum_cc_weight)) < epsilon', where epsilon -is a very small positive value of your choice, such as 0.0001. -Warning in query tg_louvain (WARN-5): line 173, col 29 -The comparison '-s.@max_best_move.weight==s.@sum_cc_weight' may lead to -unexpected behavior because it involves equality test between float/double -numeric values. 
We suggest to do such comparison with an error margin, e.g. -'abs((-s.@max_best_move.weight) - (s.@sum_cc_weight)) < epsilon', where epsilon -is a very small positive value of your choice, such as 0.0001. -Successfully created queries: [tg_louvain]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_component/tg_cycle_component.gsql -Successfully created queries: [tg_cycle_component]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/unweighted/tg_shortest_ss_no_wt.gsql -Successfully created queries: [tg_shortest_ss_no_wt]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/weighted/any_sign/tg_shortest_ss_any_wt.gsql -Successfully created queries: [tg_shortest_ss_any_wt]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/weighted/positive/summary/tg_shortest_ss_pos_wt.gsql -Warning in query tg_shortest_ss_pos_wt (WARN-5): line 102, col 20 -The comparison 's.@min_prev_path!=-1' may lead to unexpected behavior because it -involves equality test between float/double numeric values. We suggest to do -such comparison with an error margin, e.g. 'abs((s.@min_prev_path) - (-1)) > -epsilon', where epsilon is a very small positive value of your choice, such as -0.0001. -Successfully created queries: [tg_shortest_ss_pos_wt]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/shortest_path/weighted/positive/traceback/tg_shortest_ss_pos_wt_tb.gsql -Warning in query tg_shortest_ss_pos_wt_tb (WARN-5): line 123, col 20 -The comparison 's.@min_prev_min_path!=-1' may lead to unexpected behavior -because it involves equality test between float/double numeric values. We -suggest to do such comparison with an error margin, e.g. 
-'abs((s.@min_prev_min_path) - (-1)) > epsilon', where epsilon is a very small -positive value of your choice, such as 0.0001. -Successfully created queries: [tg_shortest_ss_pos_wt_tb]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_detection/count/tg_cycle_detection_count.gsql -Successfully created queries: [tg_cycle_detection_count]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_detection/full_result/standard/tg_cycle_detection.gsql -Successfully created queries: [tg_cycle_detection]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/cycle_detection/full_result/batch/tg_cycle_detection_batch.gsql -Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 37 -unsatisfiable pattern e_type_set -Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 37 -unsatisfiable pattern e_type_set -Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 35 -unsatisfiable pattern -(e_type_set:e)- :t -Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 35 -unsatisfiable pattern -(e_type_set:e)- :t -Warning in query tg_cycle_detection_batch (WARN-2): line 61, col 26 -unsatisfiable pattern Active:s -(e_type_set:e)- :t - -Type Check Error in query tg_cycle_detection_batch (TYP-8029): line 61, col 21 -the pattern "Active:s -(e_type_set:e)- :t" has an undirected edge, but the graph -does not contain any undirected edges - -Saved as draft query with type/semantic error: [tg_cycle_detection_batch]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/estimated_diameter/approximate/tg_estimate_diameter.gsql - -Semantic Check Error in query tg_estimate_diameter (SEM-45): line 52, col 34 -The tuple name or the function tg_max_BFS_depth is not defined. -Saved as draft query with type/semantic error: [tg_estimate_diameter]. 
-Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/estimated_diameter/max_bfs/tg_max_BFS_depth.gsql -Successfully created queries: [tg_max_BFS_depth]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/astar_shortest_path/tg_astar.gsql -Successfully created queries: [tg_astar]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/path_between_two_vertices/one_direction/tg_all_path.gsql -Successfully created queries: [tg_all_path]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/path_between_two_vertices/bidirection/tg_all_path_bidirection.gsql -Successfully created queries: [tg_all_path_bidirection]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/minimum_spanning_forest/tg_msf.gsql -Successfully created queries: [tg_msf]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/minimum_spanning_tree/tg_mst.gsql -Successfully created queries: [tg_mst]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/bfs/tg_bfs.gsql -Successfully created queries: [tg_bfs]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Path/maxflow/tg_maxflow.gsql -Warning in query tg_maxflow (WARN-5): line 129, col 25 -The comparison 'fl-@@group_by_flow_accum.get(s,t).flow>=@@max_cap_threshold' may -lead to unexpected behavior because it involves equality test between -float/double numeric values. We suggest to do such comparison with an error -margin, e.g. 'fl-@@group_by_flow_accum.get(s,t).flow>=@@max_cap_threshold - -epsilon', where epsilon is a very small positive value of your choice, such as -0.0001. 
-Warning in query tg_maxflow (WARN-5): line 140, col 25 -The comparison '@@group_by_flow_accum.get(t,s).flow>=@@max_cap_threshold' may -lead to unexpected behavior because it involves equality test between -float/double numeric values. We suggest to do such comparison with an error -margin, e.g. '@@group_by_flow_accum.get(t,s).flow>=@@max_cap_threshold - -epsilon', where epsilon is a very small positive value of your choice, such as -0.0001. -Warning in query tg_maxflow (WARN-5): line 211, col 20 -The comparison '@@group_by_flow_accum.get(s,t).flow>=min_flow_threshhold' may -lead to unexpected behavior because it involves equality test between -float/double numeric values. We suggest to do such comparison with an error -margin, e.g. '@@group_by_flow_accum.get(s,t).flow>=min_flow_threshhold - -epsilon', where epsilon is a very small positive value of your choice, such as -0.0001. -Successfully created queries: [tg_maxflow]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/betweenness/tg_betweenness_cent.gsql -Successfully created queries: [tg_betweenness_cent]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/closeness/approximate/tg_closeness_cent_approx.gsql -Successfully created queries: [tg_closeness_cent_approx]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/closeness/exact/tg_closeness_cent.gsql -Successfully created queries: [tg_closeness_cent]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/eigenvector/tg_eigenvector_cent.gsql -Warning in query tg_eigenvector_cent (WARN-5): line 85, col 19 -The comparison 's.@sum_eigen_value==1.0' may lead to unexpected behavior because -it involves equality test between float/double numeric values. We suggest to do -such comparison with an error margin, e.g. 
'abs((s.@sum_eigen_value) - (1.0)) < -epsilon', where epsilon is a very small positive value of your choice, such as -0.0001. -Successfully created queries: [tg_eigenvector_cent]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/influence_maximization/greedy/tg_influence_maximization_greedy.gsql -Successfully created queries: [tg_influence_maximization_greedy]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/influence_maximization/CELF/tg_influence_maximization_CELF.gsql -Warning in query tg_influence_maximization_CELF (WARN-5): line 83, col 19 -The comparison 's.@influence_value>=score' may lead to unexpected behavior -because it involves equality test between float/double numeric values. We -suggest to do such comparison with an error margin, e.g. -'s.@influence_value>=score - epsilon', where epsilon is a very small positive -value of your choice, such as 0.0001. -Successfully created queries: [tg_influence_maximization_CELF]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/degree/unweighted/tg_degree_cent.gsql -Successfully created queries: [tg_degree_cent]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/degree/weighted/tg_weighted_degree_cent.gsql -Successfully created queries: [tg_weighted_degree_cent]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql -Successfully created queries: [tg_pagerank]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/global/weighted/tg_pagerank_wt.gsql -Successfully created queries: [tg_pagerank_wt]. 
-Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/personalized/multi_source/tg_pagerank_pers.gsql -Successfully created queries: [tg_pagerank_pers]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/personalized/all_pairs/tg_pagerank_pers_ap_batch.gsql -Successfully created queries: [tg_pagerank_pers_ap_batch]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/harmonic/tg_harmonic_cent.gsql -Successfully created queries: [tg_harmonic_cent]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/article_rank/tg_article_rank.gsql -Successfully created queries: [tg_article_rank]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Patterns/frequent_pattern_mining/tg_fpm_pre.gsql - -Type Check Error in query tg_fpm_pre (TYP-158): line 47, col 12 -'s.item_list' indicates no valid vertex type. -Possible reasons: - -- The expression refers to a primary_id, which is not directly -usable in the query body. To use primary_id, declare it as an -attribute. E.g "CREATE VERTEX Person (PRIMARY_ID ssn string, ssn string, age -int)" -- The expression has misspelled an attribute, or a vertex name - -Saved as draft query with type/semantic error: [tg_fpm_pre]. -Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Patterns/frequent_pattern_mining/tg_fpm.gsql -Warning in query tg_fpm (WARN-7): line 151, col 9 -POST-ACCUM clauses binding to multiple aliases ([s, t]) will be deprecated soon. -Please separate into 2 POST-ACCUM clauses, one for each alias. -Warning in query tg_fpm (WARN-7): line 151, col 9 -POST-ACCUM clauses binding to multiple aliases ([s, t]) will be deprecated soon. -Please separate into 2 POST-ACCUM clauses, one for each alias. 
- -Type Check Error in query tg_fpm (TYP-158): line 80, col 33 -'s.item_list' indicates no valid vertex type. -Possible reasons: - -- The expression refers to a primary_id, which is not directly -usable in the query body. To use primary_id, declare it as an -attribute. E.g "CREATE VERTEX Person (PRIMARY_ID ssn string, ssn string, age -int)" -- The expression has misspelled an attribute, or a vertex name - -Saved as draft query with type/semantic error: [tg_fpm]. -Installing queries for graph: MyGraph -Start installing queries, about 1 minute ... -tg_tri_count_fast query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_tri_count_fast?v_type=VALUE&e_type=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_tri_count query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_tri_count?v_type=VALUE&e_type=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_all_path query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_all_path?v_source=VALUE&v_source.type=VERTEX_TYPE&target_v=VALUE&target_v.type=VERTEX_TYPE&[depth=VALUE]&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_embedding_pairwise_cosine_similarity query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_embedding_pairwise_cosine_similarity?v1=VALUE&v1.type=VERTEX_TYPE&v2=VALUE&v2.type=VERTEX_TYPE&embedding_dimension=VALUE&embedding_attribute=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_common_neighbors query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_common_neighbors?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_wcc query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_wcc?v_type_set=VALUE&e_type_set=VALUE&[print_limit=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_shortest_ss_no_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_no_wt?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&[print_limit=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_maximal_indep_set query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_maximal_indep_set?v_type=VALUE&e_type=VALUE&[maximum_iteration=VALUE]&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_scc_small_world query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_scc_small_world?v_type=VALUE&e_type=VALUE&reverse_e_type=VALUE&[threshold=VALUE]&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_all_path_bidirection query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_all_path_bidirection?v_source=VALUE&v_source.type=VERTEX_TYPE&target_v=VALUE&target_v.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&[depth=VALUE]&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_pagerank_pers_ap_batch query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank_pers_ap_batch?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&batch_num=VALUE&print_results=VALUE&file_path=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_embedding_cosine_similarity query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_embedding_cosine_similarity?v1=VALUE&v1.type=VERTEX_TYPE&vert_types=VALUE&embedding_dimension=VALUE&k=VALUE&embedding_attribute=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
-tg_eigenvector_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_eigenvector_cent?v_type_set=VALUE&e_type_set=VALUE&[maximum_iteration=VALUE]&[conv_limit=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_pagerank_pers query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank_pers?source[INDEX]=VALUE&source[INDEX].type=VERTEX_TYPE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_mst query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_mst?opt_source=VALUE&opt_source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[maximum_iteration=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_bfs query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_bfs?v_type_set=VALUE&e_type_set=VALUE&[max_hops=VALUE]&v_start=VALUE&v_start.type=VERTEX_TYPE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_pagerank_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank_wt?v_type=VALUE&e_type=VALUE&weight_attribute=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_degree_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_degree_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&[in_degree=VALUE]&[out_degree=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_shortest_ss_any_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_any_wt?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[print_limit=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_resource_allocation query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_resource_allocation?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_greedy_graph_coloring query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_greedy_graph_coloring?v_type_set=VALUE&e_type_set=VALUE&[max_colors=VALUE]&[print_color_count=VALUE]&[print_stats=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_wcc_small_world query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_wcc_small_world?v_type=VALUE&e_type=VALUE&[threshold=VALUE]&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_scc query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_scc?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&top_k_dist=VALUE&print_limit=VALUE&[maximum_iteration=VALUE]&[iter_wcc=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_maximal_indep_set_random query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_maximal_indep_set_random?v_type_set=VALUE&e_type_set=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
-tg_max_BFS_depth query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_max_BFS_depth?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_harmonic_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_harmonic_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&[max_hops=VALUE]&[top_k=VALUE]&[wf=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_adamic_adar query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_adamic_adar?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_louvain query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_louvain?v_type_set=VALUE&e_type_set=VALUE&[weight_attribute=VALUE]&[maximum_iteration=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[print_stats=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_influence_maximization_greedy query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_influence_maximization_greedy?v_type=VALUE&e_type=VALUE&weight_attribute=VALUE&top_k=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_jaccard_nbor_ap_batch query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_jaccard_nbor_ap_batch?[top_k=VALUE]&v_type_set=VALUE&feat_v_type=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&similarity_edge=VALUE&[src_batch_num=VALUE]&[nbor_batch_num=VALUE]&[print_results=VALUE]&[print_limit=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
-tg_total_neighbors query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_total_neighbors?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_lcc query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_lcc?v_type=VALUE&e_type=VALUE&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_knn_cosine_cv_sub query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_cv_sub?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&v_label=VALUE&weight_attribute=VALUE&max_k=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_betweenness_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_betweenness_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type=VALUE&[max_hops=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_cosine_nbor_ss query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cosine_nbor_ss?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&top_k=VALUE&print_limit=VALUE&[print_results=VALUE]&[file_path=VALUE]&[similarity_edge=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_closeness_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_closeness_cent?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type=VALUE&[max_hops=VALUE]&[top_k=VALUE]&[wf=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
-tg_knn_cosine_all_sub query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_all_sub?source=VALUE&source.type=VERTEX_TYPE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&label=VALUE&top_k=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_slpa query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_slpa?v_type_set=VALUE&e_type_set=VALUE&threshold=VALUE&maximum_iteration=VALUE&print_limit=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_pagerank query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_cycle_detection query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cycle_detection?v_type_set=VALUE&e_type_set=VALUE&depth=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_cosine_nbor_ap_batch query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cosine_nbor_ap_batch?v_type=VALUE&e_type=VALUE&edge_attribute=VALUE&top_k=VALUE&[print_results=VALUE]&file_path=VALUE&similarity_edge=VALUE&[num_of_batches=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_astar query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_astar?source_vertex=VALUE&source_vertex.type=VERTEX_TYPE&target_vertex=VALUE&target_vertex.type=VERTEX_TYPE&e_type_set=VALUE&weight_type=VALUE&latitude=VALUE&longitude=VALUE&weight_attribute=VALUE&[print_stats=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
-tg_same_community query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_same_community?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&community_attribute=VALUE&community_attr_type=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_weisfeiler_lehman query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_weisfeiler_lehman?v_type=VALUE&e_type=VALUE&DEPTH=VALUE&print_limit=VALUE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_cycle_component query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cycle_component?v_type=VALUE&e_type=VALUE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_map_equation query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_map_equation?v_type=VALUE&e_type=VALUE&result_attribute=VALUE&[weight_attribute=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_knn_cosine_cv query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_cv?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&label=VALUE&min_k=VALUE&max_k=VALUE'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_jaccard_nbor_ss query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_jaccard_nbor_ss?source=VALUE&source.type=VERTEX_TYPE&e_type=VALUE&reverse_e_type=VALUE&[top_k=VALUE]&[print_results=VALUE]&[similarity_edge_type=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_article_rank query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_article_rank?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_weighted_degree_cent query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_weighted_degree_cent?v_type=VALUE&e_type=VALUE&reverse_e_type=VALUE&weight_attribute=VALUE&[in_degree=VALUE]&[out_degree=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_preferential_attachment query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_preferential_attachment?v_source=VALUE&v_source.type=VERTEX_TYPE&v_target=VALUE&v_target.type=VERTEX_TYPE&e_type_set=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_label_prop query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_label_prop?v_type_set=VALUE&e_type_set=VALUE&maximum_iteration=VALUE&print_limit=VALUE&[print_results=VALUE]&[file_path=VALUE]&[result_attribute=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_shortest_ss_pos_wt query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_pos_wt?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[epsilon=VALUE]&[print_results=VALUE]&[print_limit=VALUE]&[display_edges=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_kcore query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_kcore?v_type=VALUE&e_type=VALUE&[k_min=VALUE]&[k_max=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[print_all_k=VALUE]&[show_shells=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_cycle_detection_count query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_cycle_detection_count?v_type_set=VALUE&e_type_set=VALUE&depth=VALUE&batches=VALUE&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. 
-tg_closeness_cent_approx query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_closeness_cent_approx?v_type_set=VALUE&e_type_set=VALUE&reverse_e_type=VALUE&[top_k=VALUE]&[k=VALUE]&[max_hops=VALUE]&[epsilon=VALUE]&[print_results=VALUE]&[file_path=VALUE]&[debug=VALUE]&[sample_index=VALUE]&[max_size=VALUE]&[wf=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_knn_cosine_ss query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_knn_cosine_ss?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&weight_attribute=VALUE&label=VALUE&top_k=VALUE&[print_results=VALUE]&[file_path=VALUE]&[result_attribute=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_fastRP query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_fastRP?v_type_set=VALUE&e_type_set=VALUE&output_v_type_set=VALUE&iteration_weights=VALUE&beta=VALUE&embedding_dimension=VALUE&[default_index=VALUE]&default_length=VALUE&[default_weight=VALUE]&embedding_dim_map=VALUE&[sampling_constant=VALUE]&[random_seed=VALUE]&[result_attribute=VALUE]&[component_attribute=VALUE]&[batch_number=VALUE]&[filepath=VALUE]&[print_results=VALUE]&[choose_k=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_influence_maximization_CELF query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_influence_maximization_CELF?v_type=VALUE&e_type=VALUE&weight_attribute=VALUE&top_k=VALUE&[print_results=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_shortest_ss_pos_wt_tb query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_shortest_ss_pos_wt_tb?source=VALUE&source.type=VERTEX_TYPE&v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[epsilon=VALUE]&[print_results=VALUE]&[print_limit=VALUE]&[display_edges=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[write_size=VALUE]'. 
Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_maxflow query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_maxflow?source=VALUE&source.type=VERTEX_TYPE&sink=VALUE&sink.type=VERTEX_TYPE&v_type=VALUE&e_type_set=VALUE&reverse_e_type_set=VALUE&cap_attr=VALUE&cap_type=VALUE&[min_flow_threshhold=VALUE]&[print_results=VALUE]&[display_edges=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -tg_msf query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_msf?v_type_set=VALUE&e_type_set=VALUE&weight_attribute=VALUE&weight_type=VALUE&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. -Select 'm1' as compile server, now connecting ... -Node 'm1' is prepared as compile server. - -[======================================================================================================] 100% (62/62) -Query installation finished. -======================================== MyGraph2 ======================================== -Dropping the graph MyGraph2... -Successfully dropped jobs on the graph 'MyGraph2': [loading_job]. -All jobs on the graph 'MyGraph2' are dropped. -All queries are dropped. -The graph MyGraph2 is dropped. -Finished dropping graph MyGraph2. --------------------------------------------------------------------------------- -Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph2/1_create_schema.gsql -The graph MyGraph2 is created. -Successfully created schema change jobs: [change_schema_of_MyGraph]. -WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. 
-Kick off schema change job change_schema_of_MyGraph -Doing schema change on graph 'MyGraph2' (current version: 0) -Trying to add local vertex 'MyNode' to the graph 'MyGraph2'. -Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph2'. - -Graph MyGraph2 updated to new version 1 -The job change_schema_of_MyGraph completes in 0.933 seconds! -Local schema change succeeded. -Successfully dropped jobs on the graph 'MyGraph2': [change_schema_of_MyGraph]. --------------------------------------------------------------------------------- -Running: Creating loading job /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql -Using graph 'MyGraph2' -Successfully created loading jobs: [loading_job]. --------------------------------------------------------------------------------- -Running loading job for /home/tigergraph/mydata/northeast_usa/out.dimacs9-NE... -[Tip: Use "CTRL + C" to stop displaying the loading status update, then use "SHOW LOADING STATUS " to track the loading progress again] -[Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] -Running the following loading job: - Job name: loading_job - Jobid: MyGraph2.loading_job.file.m1.1720513297218 - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph2.loading_job.file.m1.1720513297218 -Job "MyGraph2.loading_job.file.m1.1720513297218" loading status -Current timestamp is 2024-07-09 08:21:42.475 -Loading status was last updated at 2024-07-09 08:21:42.261. 
-[FINISHED] m1 ( Finished: 1 / Total: 1 ) - +-------------------------------------------------------------------------------------------------------+ - | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| - |northeast_usa/out.dimacs9-NE | 3868020 | 11604060 | 0 | 804 kl/s | 4.81 s | 100 %| - +-------------------------------------------------------------------------------------------------------+ -LOAD SUCCESSFUL for loading jobid: MyGraph2.loading_job.file.m1.1720513297218 - Job ID: MyGraph2.loading_job.file.m1.1720513297218------------------------------------------------------+ - Elapsed time: 5 sec - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph2.loading_job.file.m1.1720513297218 - Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph2.loading_job.file.m1.1720513297218/summary - -Finished running loading job for /home/tigergraph/mydata/northeast_usa/out.dimacs9-NE. --------------------------------------------------------------------------------- -No queries to install for graph: MyGraph2 -algorithms_test (ALGOS-263) $ -algorithms_test (ALGOS-263) $ ll -total 40 --rwxr-xr-x 1 tigergraph tigergraph 2369 Jul 5 09:40 1_dataset.sh --rwxr-xr-x 1 tigergraph tigergraph 6042 Jul 8 06:24 2_setup.sh --rwxr-xr-x 1 tigergraph tigergraph 5213 Jul 8 09:08 3_run.sh -drwxrwxr-x 2 tigergraph tigergraph 4096 Jul 4 08:34 baseline -drwxr-xr-x 2 tigergraph tigergraph 4096 Jul 9 08:16 config -drwxrwxr-x 4 tigergraph tigergraph 4096 Jul 8 02:24 gsql -drwxr-xr-x 2 tigergraph tigergraph 4096 Jul 8 03:34 mem --rw-rw-r-- 1 tigergraph tigergraph 0 Jul 4 08:38 ReadMe.md -drwxr-xr-x 2 tigergraph tigergraph 4096 Jul 8 06:53 tools -algorithms_test (ALGOS-263) $ -``` - -# Run -``` -algorithms_test (ALGOS-263) $ ./3_run.sh -==================== topological_link_prediction/tg_preferential_attachment run 0 ==================== -Starting curl command for query: tg_preferential_attachment on graph: MyGraph -Result has been written to 
/home/tigergraph/data/algos/topological_link_prediction/tg_preferential_attachment/result.json -Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_preferential_attachment/duration.txt -Finished curl command for query: tg_preferential_attachment on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_preferential_attachment/memory.txt -==================== topological_link_prediction/tg_resource_allocation run 0 ==================== -Starting curl command for query: tg_resource_allocation on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_resource_allocation/result.json -Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_resource_allocation/duration.txt -Finished curl command for query: tg_resource_allocation on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_resource_allocation/memory.txt -==================== topological_link_prediction/tg_same_community run 0 ==================== -Starting curl command for query: tg_same_community on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_same_community/result.json -Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_same_community/duration.txt -Finished curl command for query: tg_same_community on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_same_community/memory.txt -==================== topological_link_prediction/tg_adamic_adar run 0 ==================== -Starting curl command for query: tg_adamic_adar on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_adamic_adar/result.json -Duration has been written to 
/home/tigergraph/data/algos/topological_link_prediction/tg_adamic_adar/duration.txt -Finished curl command for query: tg_adamic_adar on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_adamic_adar/memory.txt -==================== topological_link_prediction/tg_total_neighbors run 0 ==================== -Starting curl command for query: tg_total_neighbors on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_total_neighbors/result.json -Duration has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_total_neighbors/duration.txt -Finished curl command for query: tg_total_neighbors on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/topological_link_prediction/tg_total_neighbors/memory.txt -==================== classification/tg_greedy_graph_coloring run 0 ==================== -Starting curl command for query: tg_greedy_graph_coloring on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16777220.RESTPP_1_1.1720513538258.N) for details. Try increase header GSQL-TIMEOUT value. 
-Result has been written to /home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/result.json -Duration has been written to /home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/duration.txt -Finished curl command for query: tg_greedy_graph_coloring on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/classification/tg_greedy_graph_coloring/memory.txt -==================== classification/tg_maximal_indep_set run 0 ==================== -Starting curl command for query: tg_maximal_indep_set on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/classification/tg_maximal_indep_set/result.json -Duration has been written to /home/tigergraph/data/algos/classification/tg_maximal_indep_set/duration.txt -Finished curl command for query: tg_maximal_indep_set on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/classification/tg_maximal_indep_set/memory.txt -==================== classification/tg_knn_cosine_ss run 0 ==================== -Starting curl command for query: tg_knn_cosine_ss on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_ss/result.json -Duration has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_ss/duration.txt -Finished curl command for query: tg_knn_cosine_ss on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_ss/memory.txt -==================== classification/tg_knn_cosine_cv run 0 ==================== -Starting curl command for query: tg_knn_cosine_cv on graph: MyGraph -Error: Runtime Error: divider is zero. 
-Result has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_cv/result.json -Duration has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_cv/duration.txt -Finished curl command for query: tg_knn_cosine_cv on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/classification/tg_knn_cosine_cv/memory.txt -==================== community/tg_slpa run 0 ==================== -Starting curl command for query: tg_slpa on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16777225.RESTPP_1_1.1720514490906.N) for details. Try increase header GSQL-TIMEOUT value. -Result has been written to /home/tigergraph/data/algos/community/tg_slpa/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_slpa/duration.txt -Finished curl command for query: tg_slpa on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_slpa/memory.txt -==================== community/tg_label_prop run 0 ==================== -Starting curl command for query: tg_label_prop on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_label_prop/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_label_prop/duration.txt -Finished curl command for query: tg_label_prop on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_label_prop/memory.txt -==================== community/tg_tri_count_fast run 0 ==================== -Starting curl command for query: tg_tri_count_fast on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_tri_count_fast/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_tri_count_fast/duration.txt -Finished curl command for query: tg_tri_count_fast on graph: MyGraph -Peak 
memory has been written to /home/tigergraph/data/algos/community/tg_tri_count_fast/memory.txt -==================== community/tg_tri_count run 0 ==================== -Starting curl command for query: tg_tri_count on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_tri_count/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_tri_count/duration.txt -Finished curl command for query: tg_tri_count on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_tri_count/memory.txt -==================== community/tg_scc run 0 ==================== -Starting curl command for query: tg_scc on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_scc/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_scc/duration.txt -Finished curl command for query: tg_scc on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_scc/memory.txt -==================== community/tg_scc_small_world run 0 ==================== -Starting curl command for query: tg_scc_small_world on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_scc_small_world/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_scc_small_world/duration.txt -Finished curl command for query: tg_scc_small_world on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_scc_small_world/memory.txt -==================== community/tg_wcc run 0 ==================== -Starting curl command for query: tg_wcc on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_wcc/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_wcc/duration.txt -Finished curl command for query: tg_wcc on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_wcc/memory.txt 
-==================== community/tg_wcc_small_world run 0 ==================== -Starting curl command for query: tg_wcc_small_world on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/duration.txt -Finished curl command for query: tg_wcc_small_world on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/memory.txt -==================== community/tg_kcore run 0 ==================== -Starting curl command for query: tg_kcore on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_kcore/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_kcore/duration.txt -Finished curl command for query: tg_kcore on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_kcore/memory.txt -==================== community/tg_map_equation run 0 ==================== -Starting curl command for query: tg_map_equation on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_map_equation/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_map_equation/duration.txt -Finished curl command for query: tg_map_equation on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_map_equation/memory.txt -==================== community/tg_lcc run 0 ==================== -Starting curl command for query: tg_lcc on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_lcc/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_lcc/duration.txt -Finished curl command for query: tg_lcc on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_lcc/memory.txt -==================== community/tg_louvain run 0 ==================== 
-Starting curl command for query: tg_louvain on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/community/tg_louvain/result.json -Duration has been written to /home/tigergraph/data/algos/community/tg_louvain/duration.txt -Finished curl command for query: tg_louvain on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/community/tg_louvain/memory.txt -==================== path/tg_cycle_component run 0 ==================== -Starting curl command for query: tg_cycle_component on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/path/tg_cycle_component/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_cycle_component/duration.txt -Finished curl command for query: tg_cycle_component on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_cycle_component/memory.txt -==================== path/tg_shortest_ss_no_wt run 0 ==================== -Starting curl command for query: tg_shortest_ss_no_wt on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/duration.txt -Finished curl command for query: tg_shortest_ss_no_wt on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_no_wt/memory.txt -==================== path/tg_shortest_ss_any_wt run 0 ==================== -Starting curl command for query: tg_shortest_ss_any_wt on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16777235.RESTPP_1_1.1720516010379.N) for details. Try increase header GSQL-TIMEOUT value. 
-Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/duration.txt -Finished curl command for query: tg_shortest_ss_any_wt on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_any_wt/memory.txt -==================== path/tg_shortest_ss_pos_wt run 0 ==================== -Starting curl command for query: tg_shortest_ss_pos_wt on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/duration.txt -Finished curl command for query: tg_shortest_ss_pos_wt on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt/memory.txt -==================== path/tg_shortest_ss_pos_wt_tb run 0 ==================== -Starting curl command for query: tg_shortest_ss_pos_wt_tb on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/duration.txt -Finished curl command for query: tg_shortest_ss_pos_wt_tb on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_shortest_ss_pos_wt_tb/memory.txt -==================== path/tg_all_path run 0 ==================== -Starting curl command for query: tg_all_path on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16973847.RESTPP_1_1.1720516966396.N) for details. Try increase header GSQL-TIMEOUT value. 
-Result has been written to /home/tigergraph/data/algos/path/tg_all_path/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_all_path/duration.txt -Finished curl command for query: tg_all_path on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_all_path/memory.txt -==================== path/tg_msf run 0 ==================== -Starting curl command for query: tg_msf on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16973851.RESTPP_1_1.1720517867862.N) for details. Try increase header GSQL-TIMEOUT value. -Result has been written to /home/tigergraph/data/algos/path/tg_msf/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_msf/duration.txt -Finished curl command for query: tg_msf on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_msf/memory.txt -==================== path/tg_mst run 0 ==================== -Starting curl command for query: tg_mst on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/path/tg_mst/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_mst/duration.txt -Finished curl command for query: tg_mst on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_mst/memory.txt -==================== path/tg_bfs run 0 ==================== -Starting curl command for query: tg_bfs on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/path/tg_bfs/result.json -Duration has been written to /home/tigergraph/data/algos/path/tg_bfs/duration.txt -Finished curl command for query: tg_bfs on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/path/tg_bfs/memory.txt -==================== centrality/tg_closeness_cent_approx run 0 ==================== -Starting curl command for query: 
tg_closeness_cent_approx on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16842784.RESTPP_1_1.1720518776855.N) for details. Try increase header GSQL-TIMEOUT value. -Result has been written to /home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/duration.txt -Finished curl command for query: tg_closeness_cent_approx on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_closeness_cent_approx/memory.txt -==================== centrality/tg_eigenvector_cent run 0 ==================== -Starting curl command for query: tg_eigenvector_cent on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_eigenvector_cent/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_eigenvector_cent/duration.txt -Finished curl command for query: tg_eigenvector_cent on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_eigenvector_cent/memory.txt -==================== centrality/tg_influence_maximization_greedy run 0 ==================== -Starting curl command for query: tg_influence_maximization_greedy on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/duration.txt -Finished curl command for query: tg_influence_maximization_greedy on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_greedy/memory.txt -==================== centrality/tg_influence_maximization_CELF run 0 ==================== -Starting curl command for query: tg_influence_maximization_CELF on 
graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/duration.txt -Finished curl command for query: tg_influence_maximization_CELF on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_influence_maximization_CELF/memory.txt -==================== centrality/tg_degree_cent run 0 ==================== -Starting curl command for query: tg_degree_cent on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_degree_cent/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_degree_cent/duration.txt -Finished curl command for query: tg_degree_cent on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_degree_cent/memory.txt -==================== centrality/tg_weighted_degree_cent run 0 ==================== -Starting curl command for query: tg_weighted_degree_cent on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/duration.txt -Finished curl command for query: tg_weighted_degree_cent on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_weighted_degree_cent/memory.txt -==================== centrality/tg_pagerank run 0 ==================== -Starting curl command for query: tg_pagerank on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank/duration.txt -Finished curl command for query: tg_pagerank on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank/memory.txt -==================== 
centrality/tg_pagerank_wt run 0 ==================== -Starting curl command for query: tg_pagerank_wt on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_wt/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_wt/duration.txt -Finished curl command for query: tg_pagerank_wt on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_wt/memory.txt -==================== centrality/tg_pagerank_pers run 0 ==================== -Starting curl command for query: tg_pagerank_pers on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers/duration.txt -Finished curl command for query: tg_pagerank_pers on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers/memory.txt -==================== centrality/tg_pagerank_pers_ap_batch run 0 ==================== -Starting curl command for query: tg_pagerank_pers_ap_batch on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16842793.RESTPP_1_1.1720520220984.N) for details. Try increase header GSQL-TIMEOUT value. 
-Result has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/duration.txt -Finished curl command for query: tg_pagerank_pers_ap_batch on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_pagerank_pers_ap_batch/memory.txt -==================== centrality/tg_harmonic_cent run 0 ==================== -Starting curl command for query: tg_harmonic_cent on graph: MyGraph -Error: The query didn't finish because it exceeded the query timeout threshold (900 seconds). Please check GSE log for license expiration and RESTPP/GPE log with request id (16842797.RESTPP_1_1.1720521122397.N) for details. Try increase header GSQL-TIMEOUT value. -Result has been written to /home/tigergraph/data/algos/centrality/tg_harmonic_cent/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_harmonic_cent/duration.txt -Finished curl command for query: tg_harmonic_cent on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_harmonic_cent/memory.txt -==================== centrality/tg_article_rank run 0 ==================== -Starting curl command for query: tg_article_rank on graph: MyGraph -Result has been written to /home/tigergraph/data/algos/centrality/tg_article_rank/result.json -Duration has been written to /home/tigergraph/data/algos/centrality/tg_article_rank/duration.txt -Finished curl command for query: tg_article_rank on graph: MyGraph -Peak memory has been written to /home/tigergraph/data/algos/centrality/tg_article_rank/memory.txt -``` From 486d6d27f30904a51c76d99a7a0865e6c557f591 Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Fri, 19 Jul 2024 08:08:04 +0000 Subject: [PATCH 5/9] [ALGOS-263] feat(algos): Refine the test scripts and add ReadMe.md; --- algorithms_test/1_dataset.sh | 30 +- algorithms_test/2_setup.sh | 19 +- 
algorithms_test/4_summary.sh | 118 ----- algorithms_test/ReadMe.md | 403 ++++++++++++++++++ algorithms_test/config/1_dataset.json | 14 +- algorithms_test/config/2_setup.json | 28 +- algorithms_test/config/3_run.json | 2 +- algorithms_test/config/3_run_one.json | 34 -- .../tools/search_for_gsql_files.sh | 16 - 9 files changed, 460 insertions(+), 204 deletions(-) delete mode 100755 algorithms_test/4_summary.sh create mode 100644 algorithms_test/ReadMe.md delete mode 100644 algorithms_test/config/3_run_one.json delete mode 100755 algorithms_test/tools/search_for_gsql_files.sh diff --git a/algorithms_test/1_dataset.sh b/algorithms_test/1_dataset.sh index 7a965977..d4879d42 100755 --- a/algorithms_test/1_dataset.sh +++ b/algorithms_test/1_dataset.sh @@ -9,7 +9,7 @@ main() { fi # Read the JSON configuration file - dir=$(cd "$(dirname "$0")"; pwd) + dir=$(cd "$(dirname "$0")"; pwd) config_file="${dir}/config/1_dataset.json" # Check if the configuration file exists @@ -48,6 +48,8 @@ main() { if [ ! -d "$dataset_folder" ]; then # Download the dataset if it doesn't exist if [ ! -f "$directory/$file_name" ]; then + mkdir -p "$dataset_folder" + echo "Created directory: $dataset_folder" echo "Downloading $file_name..." if ! wget -O "$directory/$file_name" "$download_link"; then echo "Failed to download $file_name" @@ -55,13 +57,27 @@ main() { fi fi - # Unzip the dataset + # Determine the file extension and unzip accordingly echo "Unzipping $file_name into $directory..." - if tar -xvjf "$directory/$file_name" -C "$directory" --strip-components=1 --one-top-level="$top_level_dir"; then - echo "Finished unzipping $file_name." - else - echo "Failed to unzip $file_name" - fi + case "$file_name" in + *.tar.bz2) + if tar -xvjf "$directory/$file_name" -C "$directory" --strip-components=1 --one-top-level="$top_level_dir"; then + echo "Finished unzipping $file_name." 
+ else + echo "Failed to unzip $file_name" + fi + ;; + *.gz) + if gunzip -c "$directory/$file_name" > "$directory/$top_level_dir/${file_name%.gz}"; then + echo "Finished unzipping $file_name." + else + echo "Failed to unzip $file_name" + fi + ;; + *) + echo "Unsupported file format: $file_name" + ;; + esac else echo "Directory $dataset_folder already exists, skipping unzipping." fi diff --git a/algorithms_test/2_setup.sh b/algorithms_test/2_setup.sh index a3694b12..f5af188e 100755 --- a/algorithms_test/2_setup.sh +++ b/algorithms_test/2_setup.sh @@ -99,18 +99,6 @@ main() { exit 1 fi - # Extract execution steps and set default values if not provided - to_drop_graph=$(jq -r '.execution_steps.drop_graph // empty' "$config_file") - to_drop_graph=${to_drop_graph:-false} - to_create_schema=$(jq -r '.execution_steps.create_schema // empty' "$config_file") - to_create_schema=${to_create_schema:-false} - to_create_loading_job=$(jq -r '.execution_steps.create_loading_job // empty' "$config_file") - to_create_loading_job=${to_create_loading_job:-false} - to_run_loading_job=$(jq -r '.execution_steps.run_loading_job // empty' "$config_file") - to_run_loading_job=${to_run_loading_job:-false} - to_install_queries=$(jq -r '.execution_steps.install_queries // empty' "$config_file") - to_install_queries=${to_install_queries:-false} - # Iterate over each graph and get its file_path graphs=$(jq -r '.graphs | to_entries[] | @base64' "$config_file") @@ -121,6 +109,13 @@ main() { file_path=${file_path/#\~/$HOME} queries_to_install=$(echo "$graph" | jq -r '.value.queries_to_install // empty') + # Extract execution steps and set default values if not provided + to_drop_graph=$(echo "$graph" | jq -r '.value.execution_steps.drop_graph // false') + to_create_schema=$(echo "$graph" | jq -r '.value.execution_steps.create_schema // false') + to_create_loading_job=$(echo "$graph" | jq -r '.value.execution_steps.create_loading_job // false') + to_run_loading_job=$(echo "$graph" | jq -r 
'.value.execution_steps.run_loading_job // false') + to_install_queries=$(echo "$graph" | jq -r '.value.execution_steps.install_queries // false') + echo "======================================== ${graph_name} ========================================" # Drop the graph diff --git a/algorithms_test/4_summary.sh b/algorithms_test/4_summary.sh deleted file mode 100755 index 23584dae..00000000 --- a/algorithms_test/4_summary.sh +++ /dev/null @@ -1,118 +0,0 @@ -#!/bin/bash - -# Main function -main() { - # Default config file path - dir=$(cd "$(dirname "$0")"; pwd) - config_file="${dir}/config/3_run.json" - - # Parse command-line arguments - while getopts "c:f:" opt; do - case ${opt} in - c) - config_file=$OPTARG - ;; - f) - filter=$OPTARG - ;; - \?) - echo "Usage: $0 [-c config_file] [-f filter]" - exit 1 - ;; - esac - done - - # Check if required commands are available - if ! command -v jq &> /dev/null; then - echo "Error: jq is not installed." - exit 1 - fi - - # Check if the configuration file exists - if [ ! -f "$config_file" ]; then - echo "Error: Configuration file not found: $config_file" - exit 1 - fi - - # Read general settings from the configuration file - default_graph_name=$(jq -r '.general_settings.default_graph_name' "$config_file") - default_output_directory=$(jq -r '.general_settings.default_output_directory' "$config_file") - default_output_directory=${default_output_directory/#\~/$HOME} - summary_file_path=$(jq -r '.general_settings.summary_file_path' "$config_file") - summary_file_path=${summary_file_path/#\~/$HOME} - - # Write header to the CSV file - echo "algorithm,run_number,query_run_time_sec,query_peak_memory_gb" > "$summary_file_path" - - # Iterate over each algorithm - algorithms=$(jq -r '.algorithms | to_entries[] | .key' "$config_file") - for algorithm in $algorithms; do - # Apply filter if specified - if [ -n "$filter" ]; then - if [[ ! 
"$algorithm" == *$filter ]]; then - continue - fi - fi - - runs=$(jq -r ".algorithms[\"${algorithm}\"][]" "$config_file" | jq -c .) - - # Calculate total run number for an algorithm - mapfile -t runs_array <<< "$runs" - total_runs=${#runs_array[@]} - - # Iterate over each run for the algorithm - run_index=0 - for run in "${runs_array[@]}"; do - - echo "==================== ${algorithm} run ${run_index} ====================" - - # Extract values from the JSON - graph_name=$(echo "$run" | jq -r '.graph_name // empty') - query_name=$(echo "$run" | jq -r '.query_name // empty') - output_directory=$(echo "$run" | jq -r '.output_directory // empty') - - # Set default values if not provided - graph_name=${graph_name:-$default_graph_name} - if [ "$total_runs" -eq 1 ]; then - output_directory=${output_directory:-"$default_output_directory/${algorithm}"} - else - output_directory=${output_directory:-"$default_output_directory/${algorithm}/${run_index}"} - fi - output_directory=${output_directory/#\~/$HOME} - result_file_path="$output_directory/result.json" - duration_file_path="$output_directory/duration.txt" - memory_file_path="$output_directory/memory.txt" - - # Read the duration and memory from the files - - # Check if duration.txt exists - if [ -f "$duration_file_path" ]; then - # Read query run time in ms and convert to seconds - query_run_time_ms=$(cat "$duration_file_path") - query_run_time_sec=$(echo "scale=3; $query_run_time_ms / 1000" | bc) - else - echo "Warning: $duration_file_path not found." - continue - fi - - # Check if memory.txt exists - if [ -f "$memory_file_path" ]; then - # Extract query peak memory from memory.txt - query_peak_memory=$(grep 'max mem - min mem' "$memory_file_path" | awk -F' ' '{print $(NF-1)}') - else - echo "Warning: $memory_file_path not found." 
- continue - fi - - # Write the data to the CSV file - echo "$algorithm,${run_index},$query_run_time_sec,$query_peak_memory" >> "$summary_file_path" - - # Increment run index - run_index=$((run_index + 1)) - done - done -} - -# Run the main function -main "$@" - diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md new file mode 100644 index 00000000..1bcd9ad6 --- /dev/null +++ b/algorithms_test/ReadMe.md @@ -0,0 +1,403 @@ +[TOC] + +# Introduction + +This project contains a set of Bash scripts for automating various tasks related to dataset management, TigerGraph setup, and query execution. The scripts are designed to work with configuration files that specify the details for each task. Below is an overview of each script and the project structure. + +## Scripts Overview + +### 1. `1_dataset.sh` +This script downloads and extracts datasets as specified in the `config/1_dataset.json` configuration file. It supports downloading files in various formats and ensures that the data is placed in the correct directories. + +### 2. `2_setup.sh` +This script sets up TigerGraph graphs by executing tasks such as dropping existing graphs, creating schemas, creating loading jobs, and installing queries. The tasks are defined in the `config/2_setup.json` configuration file. + +### 3. `3_run.sh` +This script executes queries on TigerGraph and collects performance metrics such as runtime and peak memory usage. The queries and their parameters are specified in the `config/3_run.json` configuration file. + +## File Structure + +``` +. 
+├── 1_dataset.sh +├── 2_setup.sh +├── 3_run.sh +├── config +│ ├── 1_dataset.json +│ ├── 2_setup.json +│ └── 3_run.json +├── gsql +│ ├── MyGraph +│ │ ├── 1_create_schema.gsql +│ │ └── 2_create_loading_job.gsql +│ └── MyGraph2 +│ ├── 1_create_schema.gsql +│ └── 2_create_loading_job.gsql +├── mem +│ ├── 1_start.sh +│ ├── 2_peak.sh +│ ├── 3_reset.sh +│ ├── 4_stop.sh +│ ├── peak.awk +│ └── run_free.sh +└── ReadMe.md +``` + +### `config` Directory +Contains JSON configuration files for each script: +- `1_dataset.json`: Configuration for dataset download and extraction. +- `2_setup.json`: Configuration for setting up TigerGraph graphs. +- `3_run.json`: Configuration for running queries and collecting metrics. + +### `gsql` Directory +Contains GSQL scripts for defining schemas and loading jobs for different graphs: +- `MyGraph/1_create_schema.gsql`: Schema definition for `MyGraph`. +- `MyGraph/2_create_loading_job.gsql`: Loading job for `MyGraph`. +- `MyGraph2/1_create_schema.gsql`: Schema definition for `MyGraph2`. +- `MyGraph2/2_create_loading_job.gsql`: Loading job for `MyGraph2`. + +The folder names `MyGraph` and `MyGraph2` represent graph names. You can add more graphs by creating new folders, each containing a `1_create_schema.gsql` file to create the schema and a `2_create_loading_job.gsql` file to create the loading job. + +### `mem` Directory +Contains scripts and utilities for monitoring memory usage during query execution. These scripts are used automatically when you run 3_run.sh, but they can also be used manually if needed. +- `1_start.sh`: Starts memory monitoring. +- `2_peak.sh`: Captures peak memory usage. +- `3_reset.sh`: Resets memory monitoring. +- `4_stop.sh`: Stops memory monitoring. +- `peak.awk`: AWK script for processing memory data. +- `run_free.sh`: Utility script for running `free` command. + +## Usage + +1. **Download and extract datasets**: + ```sh + ./1_dataset.sh + ``` + +2. **Set up TigerGraph**: + ```sh + ./2_setup.sh + ``` + +3. 
**Run queries and collect metrics**: + ```sh + ./3_run.sh [-c config_file] [-f filter] + ``` + +Ensure you have the necessary permissions to execute these scripts (`chmod +x script_name.sh`) and that the required tools (`jq`, `curl`, `gsql`) are installed on your system. + +# Dataset Download and Extraction Script + +This script downloads and extracts datasets specified in a JSON configuration file. + +## Prerequisites + +- Install `jq` (e.g., `sudo apt-get install jq`). + +## Configuration + +Place a JSON configuration file at `config/1_dataset.json`: + +```json +{ + "general_settings": { + "default_directory": "~/data/public" + }, + "datasets": { + "LiveJournal": { + "download_link": "https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz", + "top_level_dir": "livejournal" + }, + "Facebook": { + "download_link": "https://snap.stanford.edu/data/facebook_combined.txt.gz", + "directory": "~/data/tmp", + "top_level_dir": "facebook" + } + } +} +``` + +## Usage + +1. Ensure `jq` is installed. +2. Place the configuration file at `config/1_dataset.json`. +3. Run the script: + ```sh + ./1_dataset.sh + ``` + +## Script Details + +- Checks for `jq`. +- Reads `config/1_dataset.json`. +- Iterates over each dataset, creating directories, downloading, and extracting files. + +## Example Output + +``` +======================================== LiveJournal ======================================== +Created directory: /home/tigergraph/data/public/livejournal +Downloading soc-LiveJournal1.txt.gz... +--2024-07-19 06:49:47-- https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz +Resolving snap.stanford.edu (snap.stanford.edu)... 171.64.75.80 +Connecting to snap.stanford.edu (snap.stanford.edu)|171.64.75.80|:443... connected. +HTTP request sent, awaiting response... 
200 OK +Length: 259619239 (248M) [application/x-gzip] +Saving to: ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ + +/home/tigergraph/data/public/soc-LiveJournal1.txt 100%[=============================================================================================================>] 247.59M 28.9MB/s in 9.2s + +2024-07-19 06:49:56 (26.8 MB/s) - ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ saved [259619239/259619239] + +Unzipping soc-LiveJournal1.txt.gz into /home/tigergraph/data/public... +Finished unzipping soc-LiveJournal1.txt.gz. +======================================== Facebook ======================================== +Created directory: /home/tigergraph/data/tmp/facebook +Downloading facebook_combined.txt.gz... +--2024-07-19 06:50:03-- https://snap.stanford.edu/data/facebook_combined.txt.gz +Resolving snap.stanford.edu (snap.stanford.edu)... 171.64.75.80 +Connecting to snap.stanford.edu (snap.stanford.edu)|171.64.75.80|:443... connected. +HTTP request sent, awaiting response... 200 OK +Length: 218576 (213K) [application/x-gzip] +Saving to: ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ + +/home/tigergraph/data/tmp/facebook_combined.txt.g 100%[=============================================================================================================>] 213.45K 539KB/s in 0.4s + +2024-07-19 06:50:04 (539 KB/s) - ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ saved [218576/218576] + +Unzipping facebook_combined.txt.gz into /home/tigergraph/data/tmp... +Finished unzipping facebook_combined.txt.gz. +``` + +# TigerGraph Setup Script + +This script automates the setup and management of TigerGraph graphs by executing various tasks such as dropping graphs, creating schemas, creating and running loading jobs, and installing queries based on a configuration file. + +## Prerequisites + +- Ensure `jq` and `TigerGraph` are installed on your system. +- Place the JSON configuration file at `config/2_setup.json`. 
+ +## Configuration File + +The configuration file `config/2_setup.json` should have the following structure: + +```json +{ + "tigergraph": { + "user_name": "tigergraph", + "password": "tigergraph" + }, + "graphs": { + "MyGraph": { + "file_path": "/home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt", + "execution_steps": { + "drop_graph": true, + "create_schema": true, + "create_loading_job": true, + "run_loading_job": true, + "install_queries":true + }, + "queries_to_install": [ + "algorithms/Community/connected_components/weakly_connected_components/small_world/tg_wcc_small_world.gsql", + "algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql" + ] + }, + "MyGraph2": { + "file_path": "/home/tigergraph/data/tmp/facebook/facebook_combined.txt", + "execution_steps": { + "drop_graph": false, + "create_schema": false, + "create_loading_job": false, + "run_loading_job": false, + "install_queries":false + }, + "queries_to_install": [ + "algorithms/Community/louvain/tg_louvain.gsql" + ] + } + } +} +``` + +## Usage + +1. Ensure `jq` and `gsql` are installed. +2. Place the configuration file at `config/2_setup.json`. +3. Run the script: + ```sh + ./2_setup.sh + ``` + +## Script Details + +### Functions + +- `run_gsql_file(file_path, command, graph_name)`: Executes a GSQL file. +- `install_queries_for_graph(graph_name, queries_to_install, repo_dir)`: Installs queries for a graph. + +### Main Workflow + +- Checks for required commands (`jq` and `gsql`). +- Reads the configuration file. +- Extracts and processes each graph's details: + - Drops the graph if specified. + - Creates the schema if specified. + - Creates the loading job if specified. + - Runs the loading job if specified. + - Installs queries if specified. + +## Example Output + +``` +======================================== MyGraph ======================================== +Dropping the graph MyGraph... +Graph 'MyGraph' does not exist. +Graph 'MyGraph' does not exist. 
+The graph MyGraph could not be dropped! +Finished dropping graph MyGraph. +-------------------------------------------------------------------------------- +Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/1_create_schema.gsql +Stopping GPE GSE RESTPP +Successfully stopped GPE GSE RESTPP in 16.598 seconds +Starting GPE GSE RESTPP +Successfully started GPE GSE RESTPP in 0.068 seconds +The graph MyGraph is created. +Successfully created schema change jobs: [change_schema_of_MyGraph]. +WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. +Kick off schema change job change_schema_of_MyGraph +Doing schema change on graph 'MyGraph' (current version: 0) +Trying to add local vertex 'MyNode' to the graph 'MyGraph'. +Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph'. + +Graph MyGraph updated to new version 1 +The job change_schema_of_MyGraph completes in 0.643 seconds! +Local schema change succeeded. +Successfully dropped jobs on the graph 'MyGraph': [change_schema_of_MyGraph]. +-------------------------------------------------------------------------------- +Running: Creating loading job /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql +Using graph 'MyGraph' +Successfully created loading jobs: [loading_job]. +-------------------------------------------------------------------------------- +Running loading job for /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt... 
+[Tip: Use "CTRL + C" to stop displaying the loading status update, then use "SHOW LOADING STATUS " to track the loading progress again] +[Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] +Running the following loading job: + Job name: loading_job + Jobid: MyGraph.loading_job.file.m1.1721373808522 + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721373808522 +Job "MyGraph.loading_job.file.m1.1721373808522" loading status +Current timestamp is 2024-07-19 07:23:34.03 +Loading status was last updated at 2024-07-19 07:23:30.272. +[FINISHED] m1 ( Finished: 1 / Total: 1 ) + +-----------------------------------------------------------------------------------------------+ + | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| + |soc-LiveJournal1.txt | 3 | 6 | 68993776 | <1 l/s | 1.58 s | 100 %| + +-----------------------------------------------------------------------------------------------+ +[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt: 68993773 line(s) do not have enough number of tokens. +[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt:MyEdge: 3 object(s) have invalid attributes. +Sampling error data can be viewed by executing the 'SHOW LOADING ERROR MyGraph.loading_job.file.m1.1721373808522'. +LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1721373808522 + Job ID: MyGraph.loading_job.file.m1.1721373808522 + Elapsed time: 2 sec + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721373808522 + Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721373808522/summary + +Finished running loading job for /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt. +-------------------------------------------------------------------------------- +All queries are dropped. 
+Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/weakly_connected_components/small_world/tg_wcc_small_world.gsql +Successfully created queries: [tg_wcc_small_world]. +Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Centrality/pagerank/global/unweighted/tg_pagerank.gsql +Successfully created queries: [tg_pagerank]. +Installing queries for graph: MyGraph +Start installing queries, about 1 minute ... +tg_wcc_small_world query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_wcc_small_world?v_type=VALUE&e_type=VALUE&[threshold=VALUE]&[print_results=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +tg_pagerank query: curl -X GET 'http://127.0.0.1:9000/query/MyGraph/tg_pagerank?v_type=VALUE&e_type=VALUE&[max_change=VALUE]&[maximum_iteration=VALUE]&[damping=VALUE]&[top_k=VALUE]&[print_results=VALUE]&[result_attribute=VALUE]&[file_path=VALUE]&[display_edges=VALUE]'. Add -H "Authorization: Bearer TOKEN" if authentication is enabled. +Select 'm1' as compile server, now connecting ... +Node 'm1' is prepared as compile server. + +[========================================================================================================] 100% (2/2) +Query installation finished. +======================================== MyGraph2 ======================================== +``` + +# TigerGraph Query Execution Script + +This script automates the execution of queries on TigerGraph and collects performance metrics such as runtime and peak memory usage. + +## Prerequisites + +- Ensure `jq` and `curl` are installed on your system. +- Place the JSON configuration file at `config/3_run.json`. 
+ +## Configuration File + +The configuration file `config/3_run.json` should have the following structure: + +```json +{ + "general_settings": { + "default_graph_name": "MyGraph", + "default_timeout_in_minutes": "15", + "default_output_directory": "~/data/algos", + "summary_file_path": "~/data/algos/summary.csv" + }, + "algorithms": { + "community/tg_wcc_small_world": [ + { + "query_name": "tg_wcc_small_world", + "parameters": { + "v_type": "MyNode", + "e_type": "MyEdge", + "threshold": 100000, + "print_results": false + } + } + ] + } +}``` + +## Usage + +1. Ensure `jq` and `curl` are installed. +2. Place the configuration file at `config/3_run.json`. +3. Run the script: + ```sh + ./3_run.sh [-c config_file] [-f filter] + ``` + +### Options + +- `-c config_file`: Specify a custom configuration file (default: `config/3_run.json`). +- `-f filter`: Filter algorithms to run based on a substring match. + +## Script Details + +### Functions + +- `run_curl_command(graph_name, query_name, para_str, timeout_ms, result_file_path, duration_file_path)`: Executes a query and collects runtime. +- `main()`: Main function to read configuration, execute queries, and collect metrics. + +### Main Workflow + +- Parses command-line arguments. +- Checks for required commands (`jq` and `curl`). +- Reads the configuration file. +- Iterates over each algorithm and its runs: + - Applies filters if specified. + - Executes the query using `curl`. + - Measures runtime and peak memory usage. + - Writes results to the specified output files and summary CSV. 
+ +## Example Output + +``` +==================== community/tg_wcc_small_world run 0 ==================== +Starting curl command for query: tg_wcc_small_world on graph: MyGraph +Result has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/result.json +Duration has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/duration.txt +Finished curl command for query: tg_wcc_small_world on graph: MyGraph +Peak memory has been written to /home/tigergraph/data/algos/community/tg_wcc_small_world/memory.txt +``` + diff --git a/algorithms_test/config/1_dataset.json b/algorithms_test/config/1_dataset.json index e0991551..3539b53e 100644 --- a/algorithms_test/config/1_dataset.json +++ b/algorithms_test/config/1_dataset.json @@ -3,14 +3,14 @@ "default_directory": "~/data/public" }, "datasets": { - "Zhishi": { - "download_link": "http://konect.cc/files/download.tsv.zhishi-all.tar.bz2", - "top_level_dir": "zhishi-all" + "LiveJournal": { + "download_link": "https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz", + "top_level_dir": "livejournal" }, - "Northeast USA": { - "download_link": "http://konect.cc/files/download.tsv.dimacs9-NE.tar.bz2", - "directory": "~/mydata", - "top_level_dir": "northeast_usa" + "Facebook": { + "download_link": "https://snap.stanford.edu/data/facebook_combined.txt.gz", + "directory": "~/data/tmp", + "top_level_dir": "facebook" } } } diff --git a/algorithms_test/config/2_setup.json b/algorithms_test/config/2_setup.json index 1edaa1e4..19d79cb8 100644 --- a/algorithms_test/config/2_setup.json +++ b/algorithms_test/config/2_setup.json @@ -3,16 +3,16 @@ "user_name": "tigergraph", "password": "tigergraph" }, - "execution_steps": { - "drop_graph": true, - "create_schema": true, - "create_loading_job": true, - "run_loading_job": true, - "install_queries":true - }, "graphs": { "MyGraph": { - "file_path": "~/data/public/zhishi-all/out.zhishi-all", + "file_path": 
"/home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt", + "execution_steps": { + "drop_graph": true, + "create_schema": true, + "create_loading_job": true, + "run_loading_job": true, + "install_queries":true + }, "queries_to_install": [ "algorithms/Similarity/jaccard/all_pairs/tg_jaccard_nbor_ap_batch.gsql", "algorithms/Similarity/jaccard/single_source/tg_jaccard_nbor_ss.gsql", @@ -86,7 +86,17 @@ ] }, "MyGraph2": { - "file_path": "~/mydata/northeast_usa/out.dimacs9-NE" + "file_path": "/home/tigergraph/data/tmp/facebook/facebook_combined.txt", + "execution_steps": { + "drop_graph": false, + "create_schema": false, + "create_loading_job": false, + "run_loading_job": false, + "install_queries":false + }, + "queries_to_install": [ + "algorithms/Community/louvain/tg_louvain.gsql" + ] } } } diff --git a/algorithms_test/config/3_run.json b/algorithms_test/config/3_run.json index 99d551a2..6ca81db0 100644 --- a/algorithms_test/config/3_run.json +++ b/algorithms_test/config/3_run.json @@ -272,7 +272,7 @@ "weight_attribute": "weight", "maximum_iteration": 10, "result_attribute": "", - "file_path": "/home/tigergraph/data/algos/community/tg_lcc/result.csv", + "file_path": "/home/tigergraph/data/algos/community/tg_louvain/result.csv", "print_stats": false } } diff --git a/algorithms_test/config/3_run_one.json b/algorithms_test/config/3_run_one.json deleted file mode 100644 index 16146553..00000000 --- a/algorithms_test/config/3_run_one.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "general_settings": { - "default_graph_name": "MyGraph", - "default_timeout_in_minutes": "15", - "default_output_directory": "~/data/algos", - "summary_file_path": "~/data/algos/summary.csv" - }, - "algorithms": { - "community/tg_label_prop": [ - { - "query_name": "tg_label_prop", - "parameters": { - "v_type_set": "MyNode", - "e_type_set": "MyEdge", - "maximum_iteration": 10, - "print_limit": 10, - "print_results": false, - "file_path": 
"/home/tigergraph/data/algos/community/tg_label_prop/result.csv" - } - } - ], - "community/tg_wcc_small_world": [ - { - "query_name": "tg_wcc_small_world", - "parameters": { - "v_type": "MyNode", - "e_type": "MyEdge", - "threshold": 100000, - "print_results": false - } - } - ] - } -} diff --git a/algorithms_test/tools/search_for_gsql_files.sh b/algorithms_test/tools/search_for_gsql_files.sh deleted file mode 100755 index 32fc77db..00000000 --- a/algorithms_test/tools/search_for_gsql_files.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -# Directory to search for .gsql files -search_dir="../../algorithms" - -# Find all .gsql files under the search directory recursively -gsql_files=$(find "$search_dir" -type f -name "*.gsql") - -# Check if any .gsql files were found -if [ -z "$gsql_files" ]; then - echo "No .gsql files found in $search_dir" -else - echo "Found the following .gsql files:" - echo "$gsql_files" -fi - From 9e3beafce4f0307e595af0e696824f284ba7d245 Mon Sep 17 00:00:00 2001 From: xuanleilin <52686488+xuanleilin@users.noreply.github.com> Date: Fri, 19 Jul 2024 16:14:21 +0800 Subject: [PATCH 6/9] Update ReadMe.md --- algorithms_test/ReadMe.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md index 1bcd9ad6..f7f31041 100644 --- a/algorithms_test/ReadMe.md +++ b/algorithms_test/ReadMe.md @@ -1,5 +1,3 @@ -[TOC] - # Introduction This project contains a set of Bash scripts for automating various tasks related to dataset management, TigerGraph setup, and query execution. The scripts are designed to work with configuration files that specify the details for each task. Below is an overview of each script and the project structure. 
From be9e5b57253dd846d509eb8de6afb812f6d5998f Mon Sep 17 00:00:00 2001 From: xuanleilin <52686488+xuanleilin@users.noreply.github.com> Date: Fri, 19 Jul 2024 16:15:37 +0800 Subject: [PATCH 7/9] Update ReadMe.md --- algorithms_test/ReadMe.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md index f7f31041..ab51947f 100644 --- a/algorithms_test/ReadMe.md +++ b/algorithms_test/ReadMe.md @@ -354,7 +354,8 @@ The configuration file `config/3_run.json` should have the following structure: } ] } -}``` +} +``` ## Usage From 4368f0ff59c87f4085738d9499073ae1681f8ec3 Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Fri, 19 Jul 2024 09:01:20 +0000 Subject: [PATCH 8/9] [ALGOS-263] feat(algos): Modify the loading job and test again; --- algorithms_test/ReadMe.md | 55 +++++++++---------- .../gsql/MyGraph/2_create_loading_job.gsql | 6 +- .../gsql/MyGraph2/2_create_loading_job.gsql | 6 +- 3 files changed, 33 insertions(+), 34 deletions(-) diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md index ab51947f..e8498f72 100644 --- a/algorithms_test/ReadMe.md +++ b/algorithms_test/ReadMe.md @@ -136,36 +136,35 @@ Place a JSON configuration file at `config/1_dataset.json`: ======================================== LiveJournal ======================================== Created directory: /home/tigergraph/data/public/livejournal Downloading soc-LiveJournal1.txt.gz... ---2024-07-19 06:49:47-- https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz +--2024-07-19 08:54:43-- https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz Resolving snap.stanford.edu (snap.stanford.edu)... 171.64.75.80 Connecting to snap.stanford.edu (snap.stanford.edu)|171.64.75.80|:443... connected. HTTP request sent, awaiting response... 
200 OK Length: 259619239 (248M) [application/x-gzip] Saving to: ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ -/home/tigergraph/data/public/soc-LiveJournal1.txt 100%[=============================================================================================================>] 247.59M 28.9MB/s in 9.2s +/home/tigergraph/data/public/soc-LiveJournal1.txt 100%[=============================================================================================================>] 247.59M 16.4MB/s in 12s -2024-07-19 06:49:56 (26.8 MB/s) - ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ saved [259619239/259619239] +2024-07-19 08:54:54 (21.2 MB/s) - ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ saved [259619239/259619239] Unzipping soc-LiveJournal1.txt.gz into /home/tigergraph/data/public... Finished unzipping soc-LiveJournal1.txt.gz. ======================================== Facebook ======================================== Created directory: /home/tigergraph/data/tmp/facebook Downloading facebook_combined.txt.gz... ---2024-07-19 06:50:03-- https://snap.stanford.edu/data/facebook_combined.txt.gz +--2024-07-19 08:55:01-- https://snap.stanford.edu/data/facebook_combined.txt.gz Resolving snap.stanford.edu (snap.stanford.edu)... 171.64.75.80 Connecting to snap.stanford.edu (snap.stanford.edu)|171.64.75.80|:443... connected. HTTP request sent, awaiting response... 
200 OK Length: 218576 (213K) [application/x-gzip] Saving to: ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ -/home/tigergraph/data/tmp/facebook_combined.txt.g 100%[=============================================================================================================>] 213.45K 539KB/s in 0.4s +/home/tigergraph/data/tmp/facebook_combined.txt.g 100%[=============================================================================================================>] 213.45K 635KB/s in 0.3s -2024-07-19 06:50:04 (539 KB/s) - ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ saved [218576/218576] +2024-07-19 08:55:02 (635 KB/s) - ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ saved [218576/218576] Unzipping facebook_combined.txt.gz into /home/tigergraph/data/tmp... -Finished unzipping facebook_combined.txt.gz. -``` +Finished unzipping facebook_combined.txt.gz.``` # TigerGraph Setup Script @@ -257,9 +256,9 @@ Finished dropping graph MyGraph. -------------------------------------------------------------------------------- Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/1_create_schema.gsql Stopping GPE GSE RESTPP -Successfully stopped GPE GSE RESTPP in 16.598 seconds +Successfully stopped GPE GSE RESTPP in 16.168 seconds Starting GPE GSE RESTPP -Successfully started GPE GSE RESTPP in 0.068 seconds +Successfully started GPE GSE RESTPP in 0.065 seconds The graph MyGraph is created. Successfully created schema change jobs: [change_schema_of_MyGraph]. WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. @@ -269,7 +268,7 @@ Trying to add local vertex 'MyNode' to the graph 'MyGraph'. 
Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph'. Graph MyGraph updated to new version 1 -The job change_schema_of_MyGraph completes in 0.643 seconds! +The job change_schema_of_MyGraph completes in 1.433 seconds! Local schema change succeeded. Successfully dropped jobs on the graph 'MyGraph': [change_schema_of_MyGraph]. -------------------------------------------------------------------------------- @@ -282,24 +281,24 @@ Running loading job for /home/tigergraph/data/public/livejournal/soc-LiveJournal [Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] Running the following loading job: Job name: loading_job - Jobid: MyGraph.loading_job.file.m1.1721373808522 - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721373808522 -Job "MyGraph.loading_job.file.m1.1721373808522" loading status -Current timestamp is 2024-07-19 07:23:34.03 -Loading status was last updated at 2024-07-19 07:23:30.272. + Jobid: MyGraph.loading_job.file.m1.1721379381684 + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721379381684 +Job "MyGraph.loading_job.file.m1.1721379381684" loading status +Current timestamp is 2024-07-19 08:57:22.611 +Loading status was last updated at 2024-07-19 08:57:19.988. [FINISHED] m1 ( Finished: 1 / Total: 1 ) - +-----------------------------------------------------------------------------------------------+ - | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| - |soc-LiveJournal1.txt | 3 | 6 | 68993776 | <1 l/s | 1.58 s | 100 %| - +-----------------------------------------------------------------------------------------------+ -[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt: 68993773 line(s) do not have enough number of tokens. -[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt:MyEdge: 3 object(s) have invalid attributes. 
-Sampling error data can be viewed by executing the 'SHOW LOADING ERROR MyGraph.loading_job.file.m1.1721373808522'. -LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1721373808522 - Job ID: MyGraph.loading_job.file.m1.1721373808522 - Elapsed time: 2 sec - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721373808522 - Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721373808522/summary + +------------------------------------------------------------------------------------------------+ + | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| + |soc-LiveJournal1.txt | 68993774 | 206981321 | 4 | 1186 kl/s | 58.14 s | 100 %| + +------------------------------------------------------------------------------------------------+ +[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt: 3 line(s) do not have enough number of tokens. +[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt:MyEdge: 1 object(s) have invalid attributes. +Sampling error data can be viewed by executing the 'SHOW LOADING ERROR MyGraph.loading_job.file.m1.1721379381684'. +LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1721379381684 + Job ID: MyGraph.loading_job.file.m1.1721379381684 + Elapsed time: 58 sec + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721379381684 + Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721379381684/summary Finished running loading job for /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt. 
-------------------------------------------------------------------------------- diff --git a/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql b/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql index 75081e25..f2afb436 100644 --- a/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql +++ b/algorithms_test/gsql/MyGraph/2_create_loading_job.gsql @@ -5,9 +5,9 @@ USE GRAPH MyGraph CREATE LOADING JOB loading_job FOR GRAPH MyGraph { DEFINE FILENAME f1; - LOAD f1 TO VERTEX MyNode VALUES($0, _) USING SEPARATOR=" ", HEADER="true", EOL="\n", QUOTE="DOUBLE"; - LOAD f1 TO VERTEX MyNode VALUES($1, _) USING SEPARATOR=" ", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO VERTEX MyNode VALUES($0, _) USING SEPARATOR="\t", HEADER="false", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO VERTEX MyNode VALUES($1, _) USING SEPARATOR="\t", HEADER="false", EOL="\n", QUOTE="DOUBLE"; - LOAD f1 TO EDGE MyEdge VALUES($0, $1, $0) USING SEPARATOR=" ", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO EDGE MyEdge VALUES($0, $1, $0) USING SEPARATOR="\t", HEADER="false", EOL="\n", QUOTE="DOUBLE"; } diff --git a/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql b/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql index ea2e31f4..628473d6 100644 --- a/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql +++ b/algorithms_test/gsql/MyGraph2/2_create_loading_job.gsql @@ -5,9 +5,9 @@ USE GRAPH MyGraph2 CREATE LOADING JOB loading_job FOR GRAPH MyGraph2 { DEFINE FILENAME f1; - LOAD f1 TO VERTEX MyNode VALUES($0, _) USING SEPARATOR="\t", HEADER="true", EOL="\n", QUOTE="DOUBLE"; - LOAD f1 TO VERTEX MyNode VALUES($1, _) USING SEPARATOR="\t", HEADER="true", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO VERTEX MyNode VALUES($0, _) USING SEPARATOR=" ", HEADER="false", EOL="\n", QUOTE="DOUBLE"; + LOAD f1 TO VERTEX MyNode VALUES($1, _) USING SEPARATOR=" ", HEADER="false", EOL="\n", QUOTE="DOUBLE"; - LOAD f1 TO EDGE MyEdge VALUES($0, $1, $0) USING SEPARATOR="\t", HEADER="true", EOL="\n", 
QUOTE="DOUBLE"; + LOAD f1 TO EDGE MyEdge VALUES($0, $1, $0) USING SEPARATOR="\t", HEADER="false", EOL="\n", QUOTE="DOUBLE"; } From 848b393af1aa75adbbfe208b2c6be4c36ed57216 Mon Sep 17 00:00:00 2001 From: "xunalei.lin" Date: Fri, 19 Jul 2024 12:30:41 +0000 Subject: [PATCH 9/9] [ALGOS-263] feat(algos): Modify the graph schema and dataset; --- algorithms_test/1_dataset.sh | 5 +- algorithms_test/ReadMe.md | 82 +++++++++---------- algorithms_test/config/1_dataset.json | 6 +- algorithms_test/config/2_setup.json | 2 +- .../gsql/MyGraph/1_create_schema.gsql | 2 +- 5 files changed, 48 insertions(+), 49 deletions(-) diff --git a/algorithms_test/1_dataset.sh b/algorithms_test/1_dataset.sh index d4879d42..52d5ad57 100755 --- a/algorithms_test/1_dataset.sh +++ b/algorithms_test/1_dataset.sh @@ -46,10 +46,11 @@ main() { # Check if the folder exists before downloading the dataset dataset_folder="$directory/$top_level_dir" if [ ! -d "$dataset_folder" ]; then + echo "Created directory: $dataset_folder" + mkdir -p "$dataset_folder" + # Download the dataset if it doesn't exist if [ ! -f "$directory/$file_name" ]; then - mkdir -p "$dataset_folder" - echo "Created directory: $dataset_folder" echo "Downloading $file_name..." if ! 
wget -O "$directory/$file_name" "$download_link"; then echo "Failed to download $file_name" diff --git a/algorithms_test/ReadMe.md b/algorithms_test/ReadMe.md index e8498f72..38b00b4b 100644 --- a/algorithms_test/ReadMe.md +++ b/algorithms_test/ReadMe.md @@ -102,9 +102,9 @@ Place a JSON configuration file at `config/1_dataset.json`: "default_directory": "~/data/public" }, "datasets": { - "LiveJournal": { - "download_link": "https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz", - "top_level_dir": "livejournal" + "Skitter": { + "download_link": "https://snap.stanford.edu/data/as-skitter.txt.gz", + "top_level_dir": "skitter" }, "Facebook": { "download_link": "https://snap.stanford.edu/data/facebook_combined.txt.gz", @@ -133,38 +133,39 @@ Place a JSON configuration file at `config/1_dataset.json`: ## Example Output ``` -======================================== LiveJournal ======================================== -Created directory: /home/tigergraph/data/public/livejournal -Downloading soc-LiveJournal1.txt.gz... ---2024-07-19 08:54:43-- https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz +======================================== Skitter ======================================== +Created directory: /home/tigergraph/data/public/skitter +Downloading as-skitter.txt.gz... +--2024-07-19 10:28:48-- https://snap.stanford.edu/data/as-skitter.txt.gz Resolving snap.stanford.edu (snap.stanford.edu)... 171.64.75.80 Connecting to snap.stanford.edu (snap.stanford.edu)|171.64.75.80|:443... connected. HTTP request sent, awaiting response... 
200 OK -Length: 259619239 (248M) [application/x-gzip] -Saving to: ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ +Length: 33209863 (32M) [application/x-gzip] +Saving to: ‘/home/tigergraph/data/public/as-skitter.txt.gz’ -/home/tigergraph/data/public/soc-LiveJournal1.txt 100%[=============================================================================================================>] 247.59M 16.4MB/s in 12s +/home/tigergraph/data/public/as-skitt 100%[=======================================================================>] 31.67M 12.3MB/s in 2.6s -2024-07-19 08:54:54 (21.2 MB/s) - ‘/home/tigergraph/data/public/soc-LiveJournal1.txt.gz’ saved [259619239/259619239] +2024-07-19 10:28:51 (12.3 MB/s) - ‘/home/tigergraph/data/public/as-skitter.txt.gz’ saved [33209863/33209863] -Unzipping soc-LiveJournal1.txt.gz into /home/tigergraph/data/public... -Finished unzipping soc-LiveJournal1.txt.gz. +Unzipping as-skitter.txt.gz into /home/tigergraph/data/public... +Finished unzipping as-skitter.txt.gz. ======================================== Facebook ======================================== Created directory: /home/tigergraph/data/tmp/facebook Downloading facebook_combined.txt.gz... ---2024-07-19 08:55:01-- https://snap.stanford.edu/data/facebook_combined.txt.gz +--2024-07-19 10:28:52-- https://snap.stanford.edu/data/facebook_combined.txt.gz Resolving snap.stanford.edu (snap.stanford.edu)... 171.64.75.80 Connecting to snap.stanford.edu (snap.stanford.edu)|171.64.75.80|:443... connected. HTTP request sent, awaiting response... 
200 OK Length: 218576 (213K) [application/x-gzip] Saving to: ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ -/home/tigergraph/data/tmp/facebook_combined.txt.g 100%[=============================================================================================================>] 213.45K 635KB/s in 0.3s +/home/tigergraph/data/tmp/facebook_co 100%[=======================================================================>] 213.45K 622KB/s in 0.3s -2024-07-19 08:55:02 (635 KB/s) - ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ saved [218576/218576] +2024-07-19 10:28:52 (622 KB/s) - ‘/home/tigergraph/data/tmp/facebook_combined.txt.gz’ saved [218576/218576] Unzipping facebook_combined.txt.gz into /home/tigergraph/data/tmp... -Finished unzipping facebook_combined.txt.gz.``` +Finished unzipping facebook_combined.txt.gz. +``` # TigerGraph Setup Script @@ -187,7 +188,7 @@ The configuration file `config/2_setup.json` should have the following structure }, "graphs": { "MyGraph": { - "file_path": "/home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt", + "file_path": "/home/tigergraph/data/public/skitter/as-skitter.txt", "execution_steps": { "drop_graph": true, "create_schema": true, @@ -256,19 +257,19 @@ Finished dropping graph MyGraph. -------------------------------------------------------------------------------- Running: Creating schema /home/tigergraph/gsql-graph-algorithms/algorithms_test/gsql/MyGraph/1_create_schema.gsql Stopping GPE GSE RESTPP -Successfully stopped GPE GSE RESTPP in 16.168 seconds +Successfully stopped GPE GSE RESTPP in 16.500 seconds Starting GPE GSE RESTPP -Successfully started GPE GSE RESTPP in 0.065 seconds +Successfully started GPE GSE RESTPP in 0.063 seconds The graph MyGraph is created. Successfully created schema change jobs: [change_schema_of_MyGraph]. 
WARNING: When modifying the graph schema, reinstalling all affected queries is required, and the duration of this process may vary based on the number and complexity of the queries. To skip query reinstallation, you can run with the '-N' option, but manual reinstallation of queries will be necessary afterwards. Kick off schema change job change_schema_of_MyGraph Doing schema change on graph 'MyGraph' (current version: 0) Trying to add local vertex 'MyNode' to the graph 'MyGraph'. -Trying to add local edge 'MyEdge' and its reverse edge 'rev_MyEdge' to the graph 'MyGraph'. +Trying to add local edge 'MyEdge' to the graph 'MyGraph'. Graph MyGraph updated to new version 1 -The job change_schema_of_MyGraph completes in 1.433 seconds! +The job change_schema_of_MyGraph completes in 0.687 seconds! Local schema change succeeded. Successfully dropped jobs on the graph 'MyGraph': [change_schema_of_MyGraph]. -------------------------------------------------------------------------------- @@ -276,31 +277,28 @@ Running: Creating loading job /home/tigergraph/gsql-graph-algorithms/algorithms_ Using graph 'MyGraph' Successfully created loading jobs: [loading_job]. -------------------------------------------------------------------------------- -Running loading job for /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt... +Running loading job for /home/tigergraph/data/public/skitter/as-skitter.txt... [Tip: Use "CTRL + C" to stop displaying the loading status update, then use "SHOW LOADING STATUS " to track the loading progress again] [Tip: Manage loading jobs with "ABORT/RESUME LOADING JOB "] Running the following loading job: Job name: loading_job - Jobid: MyGraph.loading_job.file.m1.1721379381684 - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721379381684 -Job "MyGraph.loading_job.file.m1.1721379381684" loading status -Current timestamp is 2024-07-19 08:57:22.611 -Loading status was last updated at 2024-07-19 08:57:19.988. 
+ Jobid: MyGraph.loading_job.file.m1.1721390365568 + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721390365568 +Job "MyGraph.loading_job.file.m1.1721390365568" loading status +Current timestamp is 2024-07-19 11:59:41.602 +Loading status was last updated at 2024-07-19 11:59:36.576. [FINISHED] m1 ( Finished: 1 / Total: 1 ) - +------------------------------------------------------------------------------------------------+ - | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| - |soc-LiveJournal1.txt | 68993774 | 206981321 | 4 | 1186 kl/s | 58.14 s | 100 %| - +------------------------------------------------------------------------------------------------+ -[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt: 3 line(s) do not have enough number of tokens. -[WARNING] bad data in m1 /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt:MyEdge: 1 object(s) have invalid attributes. -Sampling error data can be viewed by executing the 'SHOW LOADING ERROR MyGraph.loading_job.file.m1.1721379381684'. -LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1721379381684 - Job ID: MyGraph.loading_job.file.m1.1721379381684'SHOW LOADING ERROR MyGraph.loading_job.file.m1.1721379381684'. - Elapsed time: 58 sec - Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721379381684 - Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721379381684/summary - -Finished running loading job for /home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt. 
+ +---------------------------------------------------------------------------------------------------------+ + | FILENAME | LINES | OBJECTS | ERRORS | AVG SPEED | DURATION | PERCENTAGE| + |public/skitter/as-skitter.txt | 11095298 | 33285894 | 0 | 1023 kl/s | 10.84 s | 100 %| + +---------------------------------------------------------------------------------------------------------+ +LOAD SUCCESSFUL for loading jobid: MyGraph.loading_job.file.m1.1721390365568 + Job ID: MyGraph.loading_job.file.m1.1721390365568---------------------------------------------------------+ + Elapsed time: 11 sec + Log directory: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721390365568 + Summary: /home/tigergraph/tigergraph/log/fileLoader/MyGraph.loading_job.file.m1.1721390365568/summary + +Finished running loading job for /home/tigergraph/data/public/skitter/as-skitter.txt. -------------------------------------------------------------------------------- All queries are dropped. Running: Creating query /home/tigergraph/gsql-graph-algorithms/algorithms_test/../algorithms/Community/connected_components/weakly_connected_components/small_world/tg_wcc_small_world.gsql diff --git a/algorithms_test/config/1_dataset.json b/algorithms_test/config/1_dataset.json index 3539b53e..bb6efa95 100644 --- a/algorithms_test/config/1_dataset.json +++ b/algorithms_test/config/1_dataset.json @@ -3,9 +3,9 @@ "default_directory": "~/data/public" }, "datasets": { - "LiveJournal": { - "download_link": "https://snap.stanford.edu/data/soc-LiveJournal1.txt.gz", - "top_level_dir": "livejournal" + "Skitter": { + "download_link": "https://snap.stanford.edu/data/as-skitter.txt.gz", + "top_level_dir": "skitter" }, "Facebook": { "download_link": "https://snap.stanford.edu/data/facebook_combined.txt.gz", diff --git a/algorithms_test/config/2_setup.json b/algorithms_test/config/2_setup.json index 19d79cb8..a54b4a77 100644 --- a/algorithms_test/config/2_setup.json +++ 
b/algorithms_test/config/2_setup.json @@ -5,7 +5,7 @@ }, "graphs": { "MyGraph": { - "file_path": "/home/tigergraph/data/public/livejournal/soc-LiveJournal1.txt", + "file_path": "/home/tigergraph/data/public/skitter/as-skitter.txt", "execution_steps": { "drop_graph": true, "create_schema": true, diff --git a/algorithms_test/gsql/MyGraph/1_create_schema.gsql b/algorithms_test/gsql/MyGraph/1_create_schema.gsql index 8007851d..b1bf9383 100644 --- a/algorithms_test/gsql/MyGraph/1_create_schema.gsql +++ b/algorithms_test/gsql/MyGraph/1_create_schema.gsql @@ -7,7 +7,7 @@ CREATE SCHEMA_CHANGE JOB change_schema_of_MyGraph FOR GRAPH MyGraph { ADD VERTEX MyNode (PRIMARY_ID id STRING, community STRING) WITH PRIMARY_ID_AS_ATTRIBUTE="true"; # 2.2 Create edges - ADD DIRECTED EDGE MyEdge (FROM MyNode, TO MyNode, weight DOUBLE) WITH REVERSE_EDGE="rev_MyEdge"; + ADD UNDIRECTED EDGE MyEdge (FROM MyNode, TO MyNode, weight DOUBLE); } # 3. Run schema_change job