| Parameter | Value |
|---|---|
| batch_command | |
| basebinary | exec |
| is_all_external_libraries_in_cc | false |
| ranges_count | 20 |
| excluded_areas | { }, |
| decan_multi_variant | true |
| dataset_handler | link |
| cqa_params | { }, |
| localbinary | /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/run/oneview_runs/compilers/gcc_5/oneview_results_1758030592/binaries/exec |
| lprof_post_process_params | { }, |
| lprof_params | |
| filter_decan | { type = all ; }, |
| batch_script | |
| maximal_path_number | 4 |
| base_run_index | 0 |
| _scalability_bins | { }, |
| multiruns_params | { }, |
| delay | 0 |
| _is_loaded | /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/run/oneview_runs/compilers/gcc_5/oneview_run_1758030592/config.json |
| repetitions | 31 |
| outliers_count | 0 |
| thread_filter_threshold | 1% |
| object_coverage_threshold | 0.01 |
| job_submission_threshold | 0s |
| environment_variables | { }, |
| script_variables | { }, |
| scalability_reference | main |
| decan_threshold | 500 |
| is_sudo_available | false |
| number_nodes | 1 |
| keep_executable_location | false |
| number_processes | 1 |
| base_run_name | gcc_5 |
| vprof_params | |
| optimizer_loop_count | 10 |
| throughput_max_core | 0 |
| __filter | true |
| binary | /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/run/binaries/gcc_5/exec |
| throughput_core | -1 |
| bucket_threshold | 1 |
| external_libraries | { 1 = /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/build/llama.cpp/../gcc_5/bin/libggml-base.so ; 2 = /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/build/llama.cpp/../gcc_5/bin/libggml-blas.so ; 3 = /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/build/llama.cpp/../gcc_5/bin/libggml-cpu.so ; 4 = /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/build/llama.cpp/../gcc_5/bin/libggml.so ; 5 = /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/build/llama.cpp/../gcc_5/bin/libllama.so ; }, |
| mpi_command | mpirun -n <number_processes> --bind-to none --report-bindings |
| run_directory | /home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/run/oneview_runs/compilers/gcc_5/oneview_run_1758030592 |
| included_areas | { }, |
| custom_categories | { { type = library ; value = libggml-base.so ; }, { type = library ; value = libggml-blas.so ; }, { type = library ; value = libggml-cpu.so ; }, { type = library ; value = libggml.so ; }, { type = library ; value = libllama.so ; }, }, |
| _is_custom_categories | false |
| dataset | |
| filter | { type = number ; value = 1 ; }, |
| additional_hwc | { }, |
| decan_all_variants | true |
| decan_params | |
| run_command | <executable> -m meta-llama-3.1-8b-instruct-Q8_0.gguf -no-cnv -t 96 -n 512 -p "what is a LLM?" --seed 0 |
| profile_start | { unit = none ; value = 0 ; }, |
| pinning_command | |
| frequencies | { 1 = 0 ; }, |
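
For reference, the sketch below shows how the effective launch line could be assembled from these settings, assuming the `<executable>` placeholder in `run_command` is filled in from `binary` and `<number_processes>` in `mpi_command` from `number_processes`; this is an illustrative reconstruction, not part of the tool itself.

```python
# Illustrative sketch only: expand the <executable> and <number_processes>
# placeholders using the configuration values listed in the table above.
config = {
    "binary": "/home/eoseret/Tools/QaaS/qaas_runs/ip-172-31-47-249.ec2.internal/175-802-9624/llama.cpp/run/binaries/gcc_5/exec",
    "number_processes": 1,
    "mpi_command": "mpirun -n <number_processes> --bind-to none --report-bindings",
    "run_command": '<executable> -m meta-llama-3.1-8b-instruct-Q8_0.gguf -no-cnv -t 96 -n 512 -p "what is a LLM?" --seed 0',
}

# Assumed substitution rules: <number_processes> -> number_processes, <executable> -> binary.
mpi_part = config["mpi_command"].replace("<number_processes>", str(config["number_processes"]))
run_part = config["run_command"].replace("<executable>", config["binary"])

# With number_processes = 1 this yields a single-process launch:
#   mpirun -n 1 --bind-to none --report-bindings <binary> -m meta-llama-3.1-8b-instruct-Q8_0.gguf ...
print(f"{mpi_part} {run_part}")
```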