timemory-plotter --table fails on tutorial output
timemory-plotter fails for the lulesh app:
timemory-plotter -t "LuLesh" -f timem-output/lulesh.json --table
Traceback (most recent call last):
  File "/opt/view/lib/python3.7/site-packages/timemory/plotting/__init__.py", line 228, in embedded_plot
    timem(_cdata, args.files[i], args)
  File "/opt/view/lib/python3.7/site-packages/timemory/plotting/table.py", line 96, in timem
    sum_data[i] += value
TypeError: unsupported operand type(s) for +=: 'int' and 'str'
Exception - unsupported operand type(s) for +=: 'int' and 'str'
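The failure mode is easy to reproduce in isolation. The plotter accumulates each metric's "repr" field, which is numeric for the scalar metrics but a preformatted string for the Linux-only I/O metrics. A minimal sketch (a hypothetical mirror of the table.py logic using values from the JSON below, not the actual code):

metrics = {
    "wall_clock": {"repr": 14.8807459},                            # numeric: summing works
    "read_char": {"repr": " 0.024256 MB, 0.122250 MB/sec rchar"},  # string: summing fails
}
sum_data = [0] * len(metrics)
for i, metric in enumerate(metrics.values()):
    sum_data[i] += metric["repr"]  # raises TypeError on "read_char": 'int' and 'str'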
To reproduce:
git clone -b ecp-2021 https://github.com/NERSC/timemory-tutorials.git
# missing instruction
cd timemory-tutorials
docker pull nersc/timemory:ecp-2021-cpu
docker run -it --rm -v $PWD:/home/timemory-tutorials nersc/timemory:ecp-2021-cpu
# inside the container
cd timemory-tutorials/apps
./build.sh
source setup-env.sh
# this works
timem -o timem-output/lulesh -- lulesh
timemory-plotter -t "LuLesh" -f timem-output/lulesh.json --table
Uploaded the JSON as a .txt file since GitHub does not support JSON attachments: lulesh.json.txt
{
  "timemory": {
    "command_line": [
      "timem",
      "lulesh"
    ],
    "config": {
      "use_shell": false,
      "use_mpi": false,
      "use_papi": false,
      "use_sample": true,
      "debug": false,
      "verbose": 0,
      "shell": "/bin/bash",
      "shell_flags": "",
      "sample_freq": 5.0,
      "sample_delay": 0.000001
    },
    "timem": [
      {
        "wall_clock": {
          "value": 14.8807459,
          "repr": 14.8807459,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": "sec"
        },
        "user_clock": {
          "value": 55.6,
          "repr": 55.6,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": "sec"
        },
        "system_clock": {
          "value": 1.78,
          "repr": 1.78,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": "sec"
        },
        "cpu_clock": {
          "value": 57.38,
          "repr": 57.38,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": "sec"
        },
        "cpu_util": {
          "value": 385.59326265216637,
          "repr": 385.59326265216637,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": "%"
        },
        "peak_rss": {
          "value": 40.988,
          "repr": 40.988,
          "laps": 75,
          "unit_value": 1000000,
          "unit_repr": "MB"
        },
        "page_rss": {
          "value": 41.971712,
          "repr": 41.971712,
          "laps": 0,
          "unit_value": 1000000,
          "unit_repr": "MB"
        },
        "virtual_memory": {
          "value": 300.703744,
          "repr": 300.703744,
          "laps": 0,
          "unit_value": 1000000,
          "unit_repr": "MB"
        },
        "num_major_page_faults": {
          "value": 0,
          "repr": 0,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": ""
        },
        "num_minor_page_faults": {
          "value": 5273,
          "repr": 5273,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": ""
        },
        "priority_context_switch": {
          "value": 6833,
          "repr": 6833,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": ""
        },
        "voluntary_context_switch": {
          "value": 3568,
          "repr": 3568,
          "laps": 75,
          "unit_value": 1,
          "unit_repr": ""
        },
        "read_char": {
          "value": {
            "first": 0.024256,
            "second": 0.1222502595600415
          },
          "repr": " 0.024256 MB, 0.122250 MB/sec rchar",
          "laps": 75,
          "unit_value": {
            "first": 1000000.0,
            "second": 1000000000000000.0
          },
          "unit_repr": {
            "first": "MB",
            "second": "MB/sec"
          }
        },
        "read_bytes": {
          "value": {
            "first": 0.0,
            "second": 0.0
          },
          "repr": " 0.000000 MB, 0.000000 MB/sec read_bytes",
          "laps": 75,
          "unit_value": {
            "first": 1000000.0,
            "second": 1000000000000000.0
          },
          "unit_repr": {
            "first": "MB",
            "second": "MB/sec"
          }
        },
        "written_char": {
          "value": {
            "value0": 0.00146,
            "value1": 0.0073564275758059
          },
          "repr": " 0.001460 MB, 0.007356 MB/sec wchar",
          "laps": 75,
          "unit_value": {
            "value0": 1000000.0,
            "value1": 1000000000000000.0
          },
          "unit_repr": {
            "value0": "MB",
            "value1": "MB/sec"
          }
        },
        "written_bytes": {
          "value": {
            "value0": 0.0,
            "value1": 0.0
          },
          "repr": " 0.000000 MB, 0.000000 MB/sec write_bytes",
          "laps": 75,
          "unit_value": {
            "value0": 1000000.0,
            "value1": 1000000000000000.0
          },
          "unit_repr": {
            "value0": "MB",
            "value1": "MB/sec"
          }
        }
      }
    ]
  }
}
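Note the shape difference: the scalar metrics carry numeric "value"/"repr" fields, while the four I/O metrics ({read,written}_{char,bytes}) carry pair-valued "value" dicts and string "repr"s. A quick way to list the offenders (a sketch, assuming the layout of the attached JSON):

import json

with open("timem-output/lulesh.json") as f:
    record = json.load(f)["timemory"]["timem"][0]

# print every metric whose "repr" is a string, i.e. the ones the plotter trips over
for name, metric in record.items():
    if isinstance(metric["repr"], str):
        print(name, "->", metric["repr"])

On Linux this prints read_char, read_bytes, written_char, and written_bytes.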
Offending line:
https://github.com/NERSC/timemory/blob/d84ccb79cb452f710917d3fa8c17a5ecf3a07406/timemory/plotting/table.py#L92
The issue only exists on Linux, not macOS, because {read,written}_{bytes,char} are only available on Linux. The plotter should use "value" instead of "repr" and should handle multidimensional data.
Also need to export the data labels for multidimensional data.
(These are just notes for me btw)
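Rough sketch of the direction for a fix (hypothetical helper, not the actual table.py code): read the numeric "value" instead of the formatted "repr", and flatten dict-valued (multidimensional) metrics into one column per component. The component keys ("first"/"second", "value0"/"value1") are stand-ins until timem exports real data labels.

def flatten_metric(name, metric):
    # hypothetical helper: return (label, number) pairs for one metric
    value = metric["value"]
    if isinstance(value, dict):
        # multidimensional metric (e.g. read_char): one column per component
        return [("{}.{}".format(name, key), v) for key, v in value.items()]
    return [(name, value)]

columns = []
for name, metric in record.items():  # record = one entry of data["timemory"]["timem"]
    columns.extend(flatten_metric(name, metric))

sum_data = [0.0] * len(columns)
for i, (_, v) in enumerate(columns):
    sum_data[i] += v  # every entry is numeric now, so no TypeError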