# Configuration

XGBoost provides global configuration management for controlling library behavior, including verbosity, memory management, GPU coordination, and parameter validation. These settings affect all XGBoost operations within the current Python session.

## Capabilities

### Global Configuration Management

Set and get global configuration parameters that affect XGBoost behavior across all operations.

```python { .api }
def set_config(
    verbosity=None,
    use_rmm=None,
    rmm_pool_size=None,
    parameter_validation=None,
    gpu_coordinate_descent=None
):
    """
    Set global configuration parameters.

    Parameters:
    - verbosity: Verbosity level (0=silent, 1=warning, 2=info, 3=debug)
    - use_rmm: Whether to use RAPIDS Memory Manager for GPU memory allocation
    - rmm_pool_size: RMM memory pool size in bytes
    - parameter_validation: Whether to validate parameters (True/False)
    - gpu_coordinate_descent: Whether to use GPU for coordinate descent

    Returns:
    None

    Note:
    Configuration changes affect all subsequent XGBoost operations.
    """

def get_config():
    """
    Get current global configuration.

    Returns:
    dict: Dictionary containing current configuration parameters
    """

def config_context(**config):
    """
    Context manager for temporary configuration changes.

    Parameters:
    **config: Configuration parameters to set temporarily

    Returns:
    Context manager that restores original configuration on exit

    Example:
    with xgb.config_context(verbosity=0):
        # XGBoost operations run silently
        model = xgb.train(params, dtrain)
    # Original verbosity restored
    """
```

### Build Information

Access build and system information about the XGBoost installation.

```python { .api }
def build_info():
    """
    Get build information about XGBoost installation.

    Returns:
    dict: Dictionary containing build information including:
    - USE_OPENMP: OpenMP support status
    - USE_CUDA: CUDA support status
    - USE_NCCL: NCCL support status
    - USE_RMM: RMM support status
    - BUILD_WITH_SHARED_LIB: Shared library build status
    - GCC_VERSION: GCC version used for compilation
    - CUDA_VERSION: CUDA version if available
    And other build-time configuration details
    """
```

## Configuration Parameters

### Verbosity Control

```python
import xgboost as xgb

# Set verbosity levels
xgb.set_config(verbosity=0)  # Silent
xgb.set_config(verbosity=1)  # Warnings only
xgb.set_config(verbosity=2)  # Info messages
xgb.set_config(verbosity=3)  # Debug output
```

### GPU Memory Management

```python
# Enable RMM for GPU memory management
xgb.set_config(use_rmm=True, rmm_pool_size=1024**3)  # 1GB pool

# Disable RMM
xgb.set_config(use_rmm=False)
```

### Parameter Validation

```python
# Enable parameter validation (default)
xgb.set_config(parameter_validation=True)

# Disable parameter validation for performance
xgb.set_config(parameter_validation=False)
```

## Usage Examples

### Basic Configuration Management

```python
import xgboost as xgb

# Check current configuration
config = xgb.get_config()
print("Current config:", config)

# Set global verbosity
xgb.set_config(verbosity=1)

# Train model with current settings
dtrain = xgb.DMatrix(X_train, label=y_train)
model = xgb.train(params, dtrain, num_boost_round=100)
```

### Temporary Configuration Changes

```python
import xgboost as xgb

# Normal verbosity
print("Current verbosity:", xgb.get_config()['verbosity'])

# Temporarily change configuration
with xgb.config_context(verbosity=0):
    # Silent training
    model = xgb.train(params, dtrain, num_boost_round=100)
    print("Training completed silently")

# Original verbosity restored
print("Verbosity restored:", xgb.get_config()['verbosity'])
```

### GPU Configuration

```python
import xgboost as xgb

# Check build info for GPU support
build_info = xgb.build_info()
print("CUDA support:", build_info.get('USE_CUDA', False))
print("RMM support:", build_info.get('USE_RMM', False))

if build_info.get('USE_CUDA'):
    # Configure GPU memory management
    xgb.set_config(
        use_rmm=True,
        rmm_pool_size=2 * 1024**3,  # 2GB memory pool
        gpu_coordinate_descent=True
    )

    # Train on GPU
    # NOTE: tree_method='gpu_hist' is deprecated in XGBoost >= 2.0;
    # use tree_method='hist' together with device='cuda' instead.
    params = {
        'objective': 'reg:squarederror',
        'tree_method': 'hist',
        'device': 'cuda'
    }
    model = xgb.train(params, dtrain)
```

### Performance Optimization

```python
import xgboost as xgb

# Optimize for production (disable validation, reduce verbosity)
xgb.set_config(
    verbosity=0,
    parameter_validation=False
)

# Training will run faster but with fewer safety checks
model = xgb.train(params, dtrain, num_boost_round=100)

# Re-enable for development
xgb.set_config(
    verbosity=1,
    parameter_validation=True
)
```

### Configuration in Different Environments

```python
import xgboost as xgb
import os

# Environment-specific configuration
if os.getenv('XGB_DEBUG'):
    xgb.set_config(verbosity=3)  # Debug mode
elif os.getenv('XGB_QUIET'):
    xgb.set_config(verbosity=0)  # Silent mode
else:
    xgb.set_config(verbosity=1)  # Default warnings

# Check environment capabilities
build_info = xgb.build_info()
if build_info.get('USE_OPENMP'):
    print("OpenMP parallelization available")
if build_info.get('USE_CUDA'):
    print("GPU acceleration available")
```

### Context Manager for Different Tasks

```python
import xgboost as xgb

# Different configurations for different tasks
def train_model_quietly(params, dtrain):
    with xgb.config_context(verbosity=0):
        return xgb.train(params, dtrain, num_boost_round=100)

def debug_model_training(params, dtrain):
    with xgb.config_context(verbosity=3, parameter_validation=True):
        return xgb.train(params, dtrain, num_boost_round=10)

# Use appropriate configuration for each task
production_model = train_model_quietly(params, dtrain)
debug_model = debug_model_training(params, dtrain)
```

## Configuration Best Practices

### Development vs Production

```python
# Development configuration
xgb.set_config(
    verbosity=2,               # Show info messages
    parameter_validation=True  # Validate parameters
)

# Production configuration
xgb.set_config(
    verbosity=1,                # Warnings only
    parameter_validation=False  # Skip validation for speed
)
```

### Resource Management

```python
# For GPU environments with limited memory
if xgb.build_info().get('USE_CUDA'):
    xgb.set_config(
        use_rmm=True,
        rmm_pool_size=512 * 1024**2  # 512MB pool
    )

# For distributed training
xgb.set_config(verbosity=1)  # Reduce log noise across workers
```