/etc/code_saturne.cfg.template is in code-saturne-data 3.2.1-1build1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
### This file configures various Code_Saturne options.
###
### The commented-out examples below are intended to demonstrate
### how to use this file.
### Section for Code_Saturne installation customizations.
[install]
### Select the batch system type and job template.
# batch =
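###
### Illustrative value only (assumes a SLURM batch system; other batch
### types may be selected the same way):
# batch = SLURM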
###
### Define installation prefixes of alternate builds for compute tasks.
### Either the absolute path or the base name of the installation prefix may
### be used (using the more concise base name assumes a consistent naming
### scheme, with builds installed side-by-side).
# compute_versions =
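###
### Illustrative value only (the build name below is hypothetical):
# compute_versions = 3.2.1-dbg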
###
### Set the location of the SYRTHES installation directory.
# syrthes =
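###
### Illustrative value only (hypothetical installation path):
# syrthes = /opt/syrthes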
###
### Set the location of the Code_Aster installation directory.
# aster =
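###
### Illustrative value only (hypothetical installation path):
# aster = /opt/aster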
###
### Optional path to a sourceable shell environment initialization file
### (similar to .profile or .bashrc, specific to Code_Saturne).
# rcfile =
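###
### Illustrative value only (hypothetical file path):
# rcfile = /etc/code_saturne_profile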
### End of section.
### Section for Code_Saturne run customizations.
[run]
### Set the temporary directory.
# scratchdir = /scratch/%(user)s
###
### Set the mesh database directory.
# meshpath =
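###
### Illustrative value only (hypothetical directory):
# meshpath = /data/meshes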
### End of section.
### Section for Code_Saturne MPI customizations.
### Due to the wide variety of MPI implementations and build options,
### the default configuration may not give correct values in some cases,
### so the settings defined here allow overriding those defaults.
[mpi]
### Path to MPI binaries
# mpi_bindir =
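### (Illustrative value only; hypothetical Open MPI installation layout.)
# mpi_bindir = /usr/lib/openmpi/bin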
### mpiexec, mpirun, or equivalent command
# mpiexec = mpiexec
### mpiexec command options
# mpiexec_opts =
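### (Illustrative value only; '--bind-to core' is an Open MPI binding option.)
# mpiexec_opts = --bind-to core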
### Option to pass arguments (usually none, or -args)
# mpiexec_args =
### Option to define executable (usually none, or -exe)
# mpiexec_exe =
### Option to define the number of ranks (e.g. ' -n ', ' -np '; trailing
### whitespace is significant: SLURM, for example, requires ' -n' with no
### trailing space for its -n<n_procs> syntax, so quotes may be used here)
# mpiexec_n = ' -n '
### Option to define number of ranks per node (e.g. ' -ppn 6 ', ' --ranks-per-node 16 ')
# mpiexec_n_per_node =
### Separator after mpiexec options (':' for Blue Gene/Q without SLURM)
# mpiexec_separator =
### Shell command to generate a hosts file if required. When using a fixed
### hosts file, passing it in mpiexec_opts is simpler, so this command is
### only useful with a resource manager that is not handled correctly
### by the MPI library.
# gen_hostsfile =
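### (Illustrative command only, assuming a PBS-style $PBS_NODEFILE.)
# gen_hostsfile = cp $PBS_NODEFILE hostsfile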
### Shell command to delete the hosts file if required
# del_hostsfile =
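### (Illustrative command only, matching the gen_hostsfile example above.)
# del_hostsfile = rm -f hostsfile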
### Command to start environment (e.g. mpdboot for some MPICH2/MPICH-3 builds)
# mpiboot =
### Command to halt environment (e.g. mpdallexit after mpdboot)
# mpihalt =
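### (Illustrative commands only, for an MPD-based MPICH2 build, using the
### mpdboot/mpdallexit pair mentioned above.)
# mpiboot = mpdboot
# mpihalt = mpdallexit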
### Multiple Program Multiple Data mode: 'mpiexec' (mpiexec ':'-separated syntax),
### 'configfile' (mpiexec -configfile syntax), or 'script'
# mpmd = mpiexec
### End of section.