#!/bin/bash
#PBS -l walltime=24:00:00
# select 2 nodes with 4 CPUs each = 8 processors in total; mpiprocs must equal ncpus and must not exceed 4
#PBS -l select=2:mpiprocs=4:ncpus=4
# scatter required
#PBS -l place=scatter
# account required
#PBS -A your_project_number
cd $PBS_O_WORKDIR
echo "RUNNING ON: `/usr/bin/uniq $PBS_NODEFILE`"
# redirect the solver output to a log file (redirecting to a bare directory would fail; adjust the file name for your case)
mpirun_jaws -np 8 interFoam -parallel > $WORKDIR/test_case/log.interFoam
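Assuming the script above is saved as, say, run_interFoam.pbs (the file name is arbitrary), it can be submitted and monitored with the standard PBS commands:
qsub run_interFoam.pbs
qstat -u $USER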
This is not a blog, but my notes and tips for research. Feel free to take any information and leave any comments or questions.
Thursday, March 31, 2011
Monday, March 28, 2011
Clone Remote Subversion Repositories With svnsync
http://www.thoughtspark.org/node/10
export nameREPO=svnfractal
export fromREPO=/media/Dell/media/mybook/subversion/albertsk2011/$nameREPO
export fromTYPE=file
export destREPO=/media/seagate/svnsync/$nameREPO
export destTYPE=file
svnadmin create $destREPO
echo '#!/bin/sh' > $destREPO/hooks/pre-revprop-change
chmod +x $destREPO/hooks/pre-revprop-change
svnsync init $destTYPE://$destREPO $fromTYPE://$fromREPO
svnsync sync $destTYPE://$destREPO
- Create your local repository: svnadmin create PATH
- Create an empty pre-revprop-change hook script: echo '#!/bin/bash' > PATH/hooks/pre-revprop-change
- Make the pre-revprop-change hook script executable: chmod +x PATH/hooks/pre-revprop-change
- Initialize svnsync: svnsync init file:///PATH URL_TO_REPO_ROOT_TO_CLONE
- Synchronize: svnsync sync DEST_URL
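Once the mirror has been initialized, svnsync sync only copies revisions that are not yet in the destination, so the same command can simply be re-run (manually or from cron) to keep the clone up to date:
svnsync sync $destTYPE://$destREPO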
Friday, March 25, 2011
Thursday, March 24, 2011
How to install OpenFOAM on a Mac
Preparations
Here we go. Note, however, that I was ultimately NOT able to install OpenFOAM on my Mac.
- Obtain the necessary software through MacPorts (see the example commands after this list).
- libtool
- Obtain the latest version of XCode.
- This may be available on your Mac OS X Install CD/DVD.
- Available online at http://developer.apple.com/technologies/xcode.html.
- Note: You will need to sign in with your Apple ID and password to access the download.
- Obtain the latest version of XQuartz from http://xquartz.macosforge.org/.
- Obtain the latest version of Qt from http://qt.nokia.com/downloads/qt-for-open-source-cpp-development-on-mac-os-x.
- Note: You do not need the full SDK. The "QT libraries" download is sufficient, and will include qmake.
- Obtain the latest version of CMake from http://www.cmake.org/cmake/resources/software.html
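For the MacPorts step, a minimal sketch (assuming MacPorts itself is already installed) looks like this:
sudo port selfupdate
sudo port install libtool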
Installation of Elmer CFD using Intel Compilers ver. 12
Almost all routines compile with some warning and recommendation messages.
Two key things should be done to compile files in fem/src.
1. "CONTIGUOUS" option should be disabled in configure files, Makefile files, and especially acx_elmer.m4.
2. The following directory should be included in $LD_LIBRARY_PATH
/opt/intel/composerxe-2011.2.137/compiler/lib/ia32/
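A minimal way to do this in bash (adjust the path to match your Intel compiler installation):
export LD_LIBRARY_PATH=/opt/intel/composerxe-2011.2.137/compiler/lib/ia32/:$LD_LIBRARY_PATH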
3. By running 'make check' in fem, I got 154 passes out of 163:
Making check in src
make[1]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src'
Making check in view3d
make[2]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/view3d'
make[2]: Nothing to be done for `check'.
make[2]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/view3d'
Making check in viewaxis
make[2]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/viewaxis'
make[2]: Nothing to be done for `check'.
make[2]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/viewaxis'
Making check in binio
make[2]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio'
Making check in test
make[3]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio/test'
make writetest readtest
make[4]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio/test'
ifort -I.. -O -fPIC -I. -Ibinio -c -o writetest.o writetest.f90
ifort -I.. -O -fPIC -I. -Ibinio -L. -L/opt/elmer/intel/lib -o writetest writetest.o ../libbinio.a -L. -ldl -L/opt/elmer/intel/lib -lm
ifort -I.. -O -fPIC -I. -Ibinio -c -o readtest.o readtest.f90
ifort -I.. -O -fPIC -I. -Ibinio -L. -L/opt/elmer/intel/lib -o readtest readtest.o ../libbinio.a -L. -ldl -L/opt/elmer/intel/lib -lm
make[4]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio/test'
make check-TESTS
make[4]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio/test'
cp writetest.sh run-writetest
chmod a+x run-writetest
cp readtest.sh run-readtest
chmod a+x run-readtest
PASS: run-writetest
PASS: run-readtest
==================
All 2 tests passed
==================
make[4]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio/test'
make[3]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio/test'
make[3]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio'
make[3]: Nothing to be done for `check-am'.
make[3]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio'
make[2]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src/binio'
make[2]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/src'
make[2]: Nothing to be done for `check-am'.
make[2]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src'
make[1]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/src'
Making check in tests
make[1]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/tests'
make check-TESTS
make[2]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem/tests'
$ELMER_HOME undefined, setting it to ../src
test 1 : 1dtests [PASSED], CPU time=0.12
test 2 : 1sttime [PASSED], CPU time=0.5
test 3 : 2ndtime [PASSED], CPU time=0.92
test 4 : AdvReactDG [PASSED], CPU time=1.23
test 5 : BlockLinElast1 [PASSED], CPU time=1.62
test 6 : BlockPoisson1 [PASSED], CPU time=1.76
test 7 : BlockPoisson2 [PASSED], CPU time=1.88
test 8 : BlockPoisson3 [PASSED], CPU time=2.32
test 9 : CapacitanceMatrix [PASSED], CPU time=2.6
test 10 : CavityLid [PASSED], CPU time=3.26
test 11 : CavityLid2 [PASSED], CPU time=7.56
test 12 : CoordinateScaling [PASSED], CPU time=7.69
test 13 : CoupledPoisson1 [PASSED], CPU time=7.81
test 14 : CoupledPoisson2 [PASSED], CPU time=8.93
test 15 : CoupledPoisson3 [PASSED], CPU time=9.07
test 16 : CoupledPoisson4 [PASSED], CPU time=9.21
test 17 : CoupledPoisson5 [PASSED], CPU time=9.38
test 18 : CoupledPoisson6 [PASSED], CPU time=9.52
test 19 : CoupledPoisson7 [PASSED], CPU time=9.66
test 20 : CoupledPoisson8 [PASSED], CPU time=9.81
test 21 : CoupledPoisson9 [PASSED], CPU time=9.95
test 22 : CurvedBndryPFEM [PASSED], CPU time=10.06
test 23 : DivergenceAnalytic2D [PASSED], CPU time=10.82
test 24 : ElastElstat1DBeam [PASSED], CPU time=11.02
test 25 : ElastElstatBeam [PASSED], CPU time=11.49
test 26 : ElasticLubrication [PASSED], CPU time=14.59
test 27 : ExtrusionStructured2D [PASSED], CPU time=18.16
test 28 : HeatControl [PASSED], CPU time=18.29
test 29 : HeatControl2 [PASSED], CPU time=18.51
test 30 : HeatGap [PASSED], CPU time=18.67
test 31 : HelmholtzBEM [PASSED], CPU time=19.34
test 32 : HelmholtzEdge [PASSED], CPU time=19.63
test 33 : HelmholtzFEM [PASSED], CPU time=19.9
test 34 : HelmholtzFace [PASSED], CPU time=20.21
test 35 : InductionHeating [PASSED], CPU time=20.7
test 36 : InductionHeating2 [PASSED], CPU time=21.56
test 37 : L2norm look at [L2norm/test.log] for details
test 38 : LimitDisplacement [PASSED], CPU time=24.44
test 39 : LimitTemperature [PASSED], CPU time=28.97
test 40 : LimitTemperature2 [PASSED], CPU time=31.52
test 41 : LubricationTunedForce [PASSED], CPU time=32.45
test 42 : MeshRefineGrading [PASSED], CPU time=36.8
test 43 : NaturalConvection [PASSED], CPU time=43.66
test 44 : NormalTangentialBC [PASSED], CPU time=45.72
test 45 : OptimizeSimplexFourHeaters [PASSED], CPU time=50.37
test 46 : PhaseChange [PASSED], CPU time=53.94
test 47 : PhaseChange2 [PASSED], CPU time=54.86
test 48 : PhaseChange3 [PASSED], CPU time=55.8
test 49 : PoissonBEM [PASSED], CPU time=55.95
test 50 : PoissonDG [PASSED], CPU time=56.2
test 51 : PoissonPFEM [PASSED], CPU time=56.32
test 52 : PorousPipe [PASSED], CPU time=57.32
test 53 : Q1Q0 [PASSED], CPU time=57.53
test 54 : RichardsDyke [PASSED], CPU time=62.57
test 55 : RichardsDyke2 [PASSED], CPU time=72.15
test 56 : RigidMeshMapper1 [PASSED], CPU time=72.74
test 57 : RigidMeshMapper2 [PASSED], CPU time=73.25
test 58 : RotatingFlow [PASSED], CPU time=74.21
test 59 : Step_ke [PASSED], CPU time=76.6
test 60 : Step_ns [PASSED], CPU time=77.14
test 61 : Step_sa [PASSED], CPU time=87.75
test 62 : Step_sst-kw-wf [PASSED], CPU time=107.02
test 63 : Step_stokes [PASSED], CPU time=107.21
test 64 : Step_stokes_block [PASSED], CPU time=107.49
test 65 : Step_v2f [PASSED], CPU time=132.67
test 66 : StokesPFEM [PASSED], CPU time=132.89
test 67 : StokesProj [PASSED], CPU time=133.45
test 68 : StrainCalculation01 [PASSED], CPU time=133.91
test 69 : StrainCalculation02 [PASSED], CPU time=142.01
test 70 : ThermalBiMetal [PASSED], CPU time=142.23
test 71 : ThermalBiMetal2 [PASSED], CPU time=142.49
test 72 : ThermalCompress [PASSED], CPU time=143.41
test 73 : TimeAdapt [PASSED], CPU time=145.49
test 74 : WaveEqu [PASSED], CPU time=145.69
test 75 : adaptivity1 look at [adaptivity1/test.log] for details
test 76 : adaptivity2 look at [adaptivity2/test.log] for details
test 77 : adaptivity3 look at [adaptivity3/test.log] for details
test 78 : adaptivity4 look at [adaptivity4/test.log] for details
test 79 : adaptivity5 look at [adaptivity5/test.log] for details
test 80 : adv_diff1 [PASSED], CPU time=146.93
test 81 : adv_diff2 [PASSED], CPU time=147.93
test 82 : adv_diff3 [PASSED], CPU time=150.8
test 83 : adv_diff4 [PASSED], CPU time=155.33
test 84 : amultg [PASSED], CPU time=156.72
test 85 : amultg2 [PASSED], CPU time=157.24
test 86 : beam-springs [PASSED], CPU time=158.1
test 87 : bentonite [PASSED], CPU time=158.24
test 88 : bodydir [PASSED], CPU time=158.38
test 89 : bodyload [PASSED], CPU time=158.5
test 90 : buckling [PASSED], CPU time=161.1
test 91 : channel_v2f [PASSED], CPU time=167.67
test 92 : coating [PASSED], CPU time=171.88
test 93 : current [PASSED], CPU time=172.19
test 94 : current_heat_control [PASSED], CPU time=173.04
test 95 : dft-water [PASSED], CPU time=173.04
test 96 : diffuser_sa [PASSED], CPU time=184.22
test 97 : diffuser_sst [PASSED], CPU time=191.64
test 98 : diffuser_v2f [PASSED], CPU time=208.07
test 99 : el_adaptivity look at [el_adaptivity/test.log] for details
test 100 : elasticity [PASSED], CPU time=208.79
test 101 : elstat [PASSED], CPU time=213.32
test 102 : elstat_source [PASSED], CPU time=213.57
test 103 : fluxsolver [PASSED], CPU time=213.71
test 104 : fluxsolver2 [PASSED], CPU time=213.86
test 105 : freesurf [PASSED], CPU time=214.75
test 106 : freesurf_axi [PASSED], CPU time=216.2
test 107 : freesurf_int [PASSED], CPU time=217.16
test 108 : freesurf_ltd [PASSED], CPU time=223.7
test 109 : fsi_beam [PASSED], CPU time=225.51
test 110 : fsi_beam_optimize [PASSED], CPU time=230.94
test 111 : fsi_box [PASSED], CPU time=233.11
test 112 : fsi_box2 [PASSED], CPU time=234.68
test 113 : geomstiff [PASSED], CPU time=234.96
test 114 : gmultg look at [gmultg/test.log] for details
test 115 : heateq [PASSED], CPU time=235.56
test 116 : heateq-par [PASSED], CPU time=235.69
test 117 : heateq_bdf2 [PASSED], CPU time=235.97
test 118 : heateq_bdf3 [PASSED], CPU time=236.25
test 119 : levelset1 [PASSED], CPU time=240.49
test 120 : levelset2 [PASSED], CPU time=243.06
test 121 : linearsolvers [PASSED], CPU time=243.35
test 122 : linearsolvers_cmplx [PASSED], CPU time=244.61
test 123 : marangoni [PASSED], CPU time=244.8
test 124 : mgdyn_bh [PASSED], CPU time=278.71
test 125 : mgdyn_harmonic [PASSED], CPU time=290.79
test 126 : mgdyn_steady [PASSED], CPU time=299.57
test 127 : mgdyn_torus [PASSED], CPU time=304.96
test 128 : mgdyn_transient [PASSED], CPU time=335.68
test 129 : mhd [PASSED], CPU time=338.38
test 130 : mhd2 [PASSED], CPU time=342
test 131 : multimesh look at [multimesh/test.log] for details
test 132 : normals [PASSED], CPU time=342.13
test 133 : passive [PASSED], CPU time=342.97
test 134 : periodic1 [PASSED], CPU time=343.15
test 135 : periodic2 [PASSED], CPU time=343.32
test 136 : periodic_explicit [PASSED], CPU time=344.99
test 137 : periodic_nonconforming [PASSED], CPU time=345.15
test 138 : periodic_rot [PASSED], CPU time=345.31
test 139 : piezo [PASSED], CPU time=345.5
test 140 : plates [PASSED], CPU time=345.66
test 141 : pointdir [PASSED], CPU time=345.79
test 142 : pointload [PASSED], CPU time=345.92
test 143 : radiation [PASSED], CPU time=346.17
test 144 : radiation2 [PASSED], CPU time=346.41
test 145 : radiation2d [PASSED], CPU time=348.41
test 146 : radiation3d [PASSED], CPU time=354.35
test 147 : reload [PASSED], CPU time=355.48
test 148 : reynolds1 [PASSED], CPU time=357.86
test 149 : reynolds2 [PASSED], CPU time=358.5
test 150 : reynolds3 [PASSED], CPU time=362.48
test 151 : rgdblock [PASSED], CPU time=364.92
test 152 : rot_aniso [PASSED], CPU time=368.66
test 153 : rotflow [PASSED], CPU time=369.54
test 154 : savescalars [PASSED], CPU time=369.98
test 155 : shell [PASSED], CPU time=370.25
test 156 : shell2 [PASSED], CPU time=370.72
test 157 : staged_sim [PASSED], CPU time=372.15
test 158 : streamlines [PASSED], CPU time=372.58
test 159 : stress [PASSED], CPU time=372.92
test 160 : structmap [PASSED], CPU time=373.09
test 161 : tresca [PASSED], CPU time=373.31
test 162 : vortex2d [PASSED], CPU time=374.73
test 163 : vortex3d [PASSED], CPU time=376.67
Tests completed, passed: 154 out of total 163 tests
Cumulative CPU time used in test: 376.67 s
PASS: runtests
==================
All 1 tests passed
==================
make[2]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/tests'
make[1]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem/tests'
make[1]: Entering directory `/home/albertsk/packages/elmerfem-intel/fem'
make[1]: Leaving directory `/home/albertsk/packages/elmerfem-intel/fem'
Coding Guidelines for Intel® Architectures
Because Fortran stores arrays in column-major order, loop nests should be arranged so that the innermost loop runs over the leftmost (fastest-varying) array subscript. In other words,
Do J
  Do I
is better than
Do I
  Do J
http://www.ncsa.illinois.edu/UserInfo/Resources/Software/Intel/Compilers/8.1/f_ug2/code_arch.htm
Wednesday, March 23, 2011
Mesh generation (free) software
Monday, March 21, 2011
capturing messages from configure and make
To capture the output of the configure and make steps you can use the script command or the following technique if using a Bourne style shell:
shell$ ./configure {options} 2>&1 | tee config.out
shell$ make all 2>&1 | tee make.out
shell$ make install 2>&1 | tee make-install.out
or, if using a csh style shell:
shell% ./configure {options} |& tee config.out
shell% make all |& tee make.out
shell% make install |& tee make-install.out
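The script command alternative mentioned above records an entire shell session instead; a minimal sketch (the log file name is arbitrary) is:
script build.log
./configure {options}
make all
exit    # stop recording; everything above is saved in build.log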
Thursday, March 17, 2011
cluster patch update using yum and repoman
Best Practice for Patching Individual Packages in PCM
Document Number: 1-1F6K86
Product: Platform OCS/PCM
Affected Version: 1.2* 2.0* 2.1*
OS: Linux
Category: Installation/Upgrade
Date Created: Jan 21 2009, 10:15 PM
Last Update: Nov 10 2010, 11:14 AM
Keywords: Update, patch, rhn, repopatch, kernel
Topic
Best Practice for Patching Individual Packages in PCM
Issue Description
I need to update the kernel package on my PCM cluster with the latest and greatest available on RHN.
Solution Detail
Often, it is necessary to apply patches to your PCM cluster. Some patches fix security issues, while other patches offer bug fixes and feature improvements. Patches can be applied in two ways.
1. One way is to use the kusu-repopatch tool to download and install all available patches for your operating system. This has the advantage of keeping your cluster nodes at the "latest and greatest" version level. There are a few downsides to this approach: updating a large number of packages is riskier and can break your production environment, and if you are using Platform OFED for InfiniBand support, kusu-repopatch cannot be used due to dependency issues. This last point only applies to PCM versions < 2.1; in 2.1 the issue is addressed, and kusu-repopatch can safely be used even with the Platform OFED kit installed.
2. The other way is to download the individual packages manually using the yum command and then add them to your PCM repository. The advantage is that updates can be done very quickly and relatively safely. The disadvantage is that not all of your packages are at the "latest and greatest" version level.
This article describes the second approach, that is, how to patch individual packages in your PCM cluster. As a working example, it shows how to patch the kernel package.
If you are interested in using kusu-repopatch to patch all the packages, please refer to the article titled "Best Practice for Patching PCM Clusters with kusu-repopatch."
0. Before you begin
Before applying any patches to your production environment, you should take some precautions in case something goes wrong. In your PCM cluster you can take advantage of repository snapshots and node group templates to apply your updates in a safe way.
NOTE: In older versions of PCM (< 2.0) the commands used here do not have the "kusu-" prefix (for example, repoman instead of kusu-repoman). To determine which version of PCM you are using, look at the output of cat /etc/kusu-release.
1. Use the kusu-repoman command to get the name of your production repository. For example,
pcm201installer:~ # kusu-repoman -l
Repo name: sles-10.3-x86_64
Repository: /depot/repos/1000
Installers: 192.168.1.1;172.27.1.43
Ostype: sles-10-x86_64
Kits: base-2.0-1-x86_64, sles-10.3-x86_64,
nagios-2.12-7-x86_64, ofed-2.0-1-x86_64,
PCM_GUI-2.0-1-x86_64, platform-hpc-2.0-2-x86_64,
platform-isf-ac-1.0- 1-x86_64, platform-lsf-7.0.6-1-x86_64,
platform-mpi-7.1-1-x86_64, platform-rtm-2.0.1-1-x86_64
The name of the repository is sles-10.3-x86_64, and it has an ID of 1000. The repository is located under the /depot/repos/1000/ directory and was created during PCM installation.
2. Create a snapshot of your production repository with the kusu-repoman command. This will create a new repository with ID 1001 under the /depot/repos/1001/ directory.
# kusu-repoman -r -s
3. Create a new node group that is a copy of your production node group with the kusu-ngedit command. First, use the kusu-nghosts -l command to determine the name of your production node group. Then use kusu-ngedit to create a copy called compute-patchtest, like this:
# kusu-ngedit -c compute-rhel-5.4-x86-64 -n compute-patchtest
4. Associate the compute-patchtest node group with the snapshot repository using the kusu-ngedit command.
5. Move a single node to the compute-patchtest node group using the kusu-nghosts command.
1. Register with Red Hat Network (RHN)
The first step is to register your cluster with the Red Hat Network. If you are using SLES, use the equivalent procedure to register for online updates. If you are using CentOS or SCL, you do not need to register your cluster, but you might need to enable the online repository in the /etc/yum.repos.d/ directory.
Do the following to register your cluster with RHN:
1. Run rhn_register on your installer node.
2. Update the /opt/kusu/etc/updates.conf file and enter a valid username/password for logging in to RHN, as well as the Server ID (a.k.a. the RHN System ID).
NOTE: The Server ID is not needed if you are running a PCM version < 2.0.
2. Download and Install the Patches
By default, yum downloads and installs the updated packages. However, since PCM is responsible for installing the updated packages on the cluster nodes, we just need yum to download the packages, not install them. For this, you need the downloadonly plugin for yum. Follow these steps to download and install the kernel patches.
1. Download the downloadonly plugin from RHN:
# yum -y install yum-downloadonly
2. Create a snapshot of your production repository. Best practice is to always test updates before applying them to the production repository. Therefore, you should first create a snapshot of your production repository and .....
3. Download the latest kernel packages from RHN. There are three packages you must update: kernel, kernel-devel and kernel-headers. The packages must be downloaded to the /depot/contrib/1001/ directory; this is where PCM looks for custom RPMs associated with the snapshot repository (id = 1001).
# yum update -y kernel kernel-devel kernel-headers --downloadonly --downloaddir=/depot/contrib/1001/
4. Update the snapshot repository:
# kusu-repoman -ur "name of snapshot repository"
This will create the appropriate symbolic links in the /depot/repos/1001/Server/ directory, and it may take up to 10 minutes to complete. The symbolic links to the latest kernel packages are shown below:
[root@pcm201installer ~]# ll /depot/repos/1000/Server/kernel-*
lrwxrwxrwx 1 root root 59 Oct 21 12:13 /depot/repos/1000/Server/kernel-2.6.18-194.17.1.el5.x86_64.rpm -> ../../../contrib/1000/kernel-2.6.18-194.17.1.el5.x86_64.rpm
lrwxrwxrwx 1 root root 65 Oct 21 12:13 /depot/repos/1000/Server/kernel-devel-2.6.18-194.17.1.el5.x86_64.rpm -> ../../../contrib/1000/kernel-devel-2.6.18-194.17.1.el5.x86_64.rpm
lrwxrwxrwx 1 root root 67 Oct 21 12:13 /depot/repos/1000/Server/kernel-headers-2.6.18-194.17.1.el5.x86_64.rpm -> ../../../contrib/1000/kernel-headers-2.6.18-194.17.1.el5.x86_64.rpm
5. Install the patches on the nodes in the compute-patchtest node group. Use the kusu-cfmsync command to install the patches "on the fly"; there is no need to re-provision the nodes to install patches in PCM. However, if you have patched the kernel, you need to reboot the nodes using the kusu-boothost command to boot them with the new kernel.
# kusu-cfmsync -n compute-patchtest -u
# kusu-boothost -r "hostname of patched node"
3. Test the Patches and Install in Production Environment
All that remains is to verify that the patches work for the nodes in the compute-patchtest node group. Do as much testing as required to convince yourself that your production applications will work correctly after the patches are applied. For example, you may wish to do the following tests:
- Test that nodes in the compute-patchtest node group can be re-provisioned correctly.
- Test that the patched packages are installed after re-provisioning. For kernel patches, run the uname -r command to make sure the kernel version is the latest one.
- If you have patched the installer node as well, you can run a command like kusu-genconfig nodes and make sure you get a list of all node hostnames in your cluster. This will confirm that the PCM tools can communicate with the cluster database.
Once you are satisfied that the patches are safe to roll out to your production environment, follow the steps outlined in section 2, "Download and Install the Patches", to patch your production repository. The only difference is that you should now specify your production node group and installer node group names when you apply the updates via kusu-cfmsync -u.
Friday, March 4, 2011
Raster3D version 2.9.2 installation on Ubuntu Maverick
Install Raster3D_2.9-2/
Install
- Tiff ftp://ftp.remotesensing.org/pub/libtiff/tiff-3.9.4.zip
- gdlib http://www.libgd.org/releases/gd-2.0.35.tar.gz
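Both libraries use the standard autotools build, so a typical sequence after unpacking each tarball (a generic sketch, not verified for these exact versions) is:
./configure
make
sudo make install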
Other graphics libraries
- sudo apt-get install libjpeg-dbg
- sudo apt-get install libjpeg-dev
- sudo apt-get install libpng-dev libpng12-0-dev libpng3-dev libpngwriter libpngwriter-dev libtk-png-perl pnmtopng
- sudo apt-get install xutils-dev
Wednesday, March 2, 2011