# Notes on compiling the development branch of AMROC.
# Create a workspace for the checkout.
mkdir vtf_dev
cd vtf_dev/
# Fetch the checkout helper script from the CVS host.
# NOTE(review): "@cvs" relies on the local username matching the remote
# account -- confirm, or use an explicit user@cvs.
scp @cvs:/home/proj/vtf3d/CheckoutAMROC_Development.sh .
# Some of the setup files are under a different group and require a
# password, which is provided by the checkout script itself.
# Check out the development branch of the repository.
# The script was copied into the current directory, so it must be run
# via an explicit path ("." is normally not on $PATH) and needs the
# execute bit set first.
chmod +x CheckoutAMROC_Development.sh
./CheckoutAMROC_Development.sh
# Formerly the environment had to be prepared by hand:
#   bash
#   export config-ralf/config.bashrc
# and the paths arranged by sourcing a helper:
#   source path.sh   # csh
#   . path.sh        # bash equivalent
# (no longer required -- the checkout script handles this)
# Go into the checked-out vtf directory.
cd vtf/
# Edit the setup file: take out the "opt=YES" line so a debug
# (non-optimized) build is configured.
vi setup
# Execute the setup to generate the platform build tree.
./setup
# Go back up one level and into the platform build directory created by
# ./setup (name encodes OS / arch / build flavor / MPI).
cd ../Linux-2.0_x86-debug-mpi/
# Go into each of the example directories and compile.
cd amroc/clawpack/applications
cd euler/
# 2-D Spheres example
cd 2d/Spheres/
make
# 3-D Spheres example (sibling directory)
cd ../../3d/Spheres/
make
# Move back into the source tree under the workspace.
# NOTE(review): assumes the workspace was created under ~/CVS --
# adjust the path if it lives elsewhere.
cd ~/CVS/vtf_dev/vtf/
# Source the generated "pathes" file to set up runtime paths
# (bash form; csh version shown commented out below).
. ../Linux-2.0_x86-debug-mpi/pathes
# source ../Linux-2.0_x86-debug-mpi/pathes # csh version
# Execute a sample RAMP 2-D example.
cd amroc/clawpack/applications/euler/2d/Ramp
# Inspect the solver configuration first.
more solver.in
# Start the node monitor in the background.
xnodemon &
# Launch the run on 8 processors, in the background.
./run.py 8 &
# Follow the solver output as it is produced.
tail -f out.txt
# Take a look at the converters.
# Up five levels: Ramp -> 2d -> euler -> applications -> clawpack -> amroc.
cd ../../../../..
# Sibling platform build tree holds the converter sources.
cd ../../Linux-2.0_x86-debug-mpi/amroc/converters/
cd src/
# Compile the converters.
make
# Inspect what was built.
ls -l
# Go look for the actual installed executables.
# Up five levels: src -> converters -> amroc -> platform dir -> vtf_dev -> parent.
cd ../../../../..
cd vtf_dev/vtf/amroc/converters/
# Switch the converters to the visualization branch.
cvs update -r amroc_dev_visualization
# Go back and check on the running application.
cd ../clawpack/applications/euler/2d/Ramp
tail -f out.txt
# Return to the SPHERES demo and run it.
# Fixed: the directory is "3d" (lower case), as created by the build
# tree above -- Linux filesystems are case-sensitive, so "3D" fails.
cd ../../3d/Spheres/
vi solver.in
./run.py 8
# 8 = number of processors
# Visualize the results using Visual3.
vi display.in
# 80 = number of time steps available
hdf2v3 80
# Visualize the results using ParaView.
vi display_file.in
# Sample display_file.in contents:
#   Type 1
#   FileType 12
#   Keys d,u,v,p,i,s
#   DisplayMinLevel 0
#   DisplayMaxLevel 10
# Convert the HDF output to a VTK unstructured-grid file.
# Fixed: the original "> ! file" is a mangled tcsh ">!" clobber
# redirect; in bash it would redirect into a file literally named "!".
# Plain ">" is the correct bash form.
hdf2file -m -f display_file.in -fs solver.in > spheres.01.vtu
# Fixed typo: the viewer binary is "paraview", not "parview".
paraview &
# In ParaView: open spheres.01.vtu and add a data view.
# End of notes.