or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

index.md · json.md · msgpack.md · pickle.md · ujson.md

docs/pickle.md

# Pickle Operations

Enhanced pickle serialization using cloudpickle for cloud computing compatibility, supporting complex objects including functions, lambdas, classes, and other Python constructs that standard pickle cannot handle.

## Capabilities

### Pickle Serialization

Core pickle operations using cloudpickle for enhanced compatibility with cloud computing environments and complex Python objects.

```python { .api }
def pickle_dumps(data, protocol=None):
    """
    Serialize Python object with cloudpickle.

    Parameters:
    - data: Python object to serialize (supports functions, lambdas, classes)
    - protocol (int | None): Pickle protocol version (-1 for highest available)

    Returns:
    bytes: Serialized pickle data
    """

def pickle_loads(data):
    """
    Deserialize pickle bytes to Python object.

    Parameters:
    - data (bytes): Pickle data to deserialize

    Returns:
    Any: Deserialized Python object
    """
```

### Advanced CloudPickle Classes

Direct access to cloudpickle functionality for custom serialization scenarios.

```python { .api }
class CloudPickler:
    """
    Enhanced pickler class with cloud computing support.

    Handles serialization of functions, lambdas, classes, and other
    constructs that standard pickle cannot serialize.
    """

    def __init__(self, file, protocol=None, buffer_callback=None):
        """
        Initialize CloudPickler.

        Parameters:
        - file: File-like object to write to
        - protocol (int): Pickle protocol version
        - buffer_callback (callable): Buffer callback for pickle protocol 5
        """

    def dump(self, obj):
        """
        Serialize object to file.

        Parameters:
        - obj: Object to serialize
        """

# Compatibility alias
Pickler = CloudPickler

def dump(obj, file, protocol=None):
    """
    Serialize object to file using cloudpickle.

    Parameters:
    - obj: Object to serialize
    - file: File-like object to write to
    - protocol (int): Pickle protocol version
    """

def dumps(obj, protocol=None):
    """
    Serialize object to bytes (alias for pickle_dumps).

    Parameters:
    - obj: Object to serialize
    - protocol (int): Pickle protocol version

    Returns:
    bytes: Serialized data
    """

def loads(data):
    """
    Deserialize from bytes (alias for pickle.loads from standard library).

    Parameters:
    - data (bytes): Serialized data

    Returns:
    Any: Deserialized object
    """

def load(file):
    """
    Deserialize from file-like object (alias for pickle.load from standard library).

    Parameters:
    - file: File-like object containing pickled data

    Returns:
    Any: Deserialized object
    """
```

## Usage Examples

### Basic Pickle Operations

```python
import srsly

# Standard data structures
data = {
    "name": "example",
    "values": [1, 2, 3, 4, 5],
    "config": {"debug": True, "timeout": 30}
}

# Serialize to bytes
pickled_data = srsly.pickle_dumps(data)
print(f"Pickled size: {len(pickled_data)} bytes")

# Deserialize from bytes
unpickled_data = srsly.pickle_loads(pickled_data)
print(f"Name: {unpickled_data['name']}")
print(f"Debug: {unpickled_data['config']['debug']}")
```

### Function and Lambda Serialization

```python
import srsly

# Define functions and lambdas
def custom_function(x, y):
    """Custom function that can be pickled with cloudpickle."""
    return x * y + 10

lambda_func = lambda x: x ** 2 + 1

# Serialize functions (not possible with standard pickle)
func_data = srsly.pickle_dumps(custom_function)
lambda_data = srsly.pickle_dumps(lambda_func)

# Deserialize and use functions
restored_func = srsly.pickle_loads(func_data)
restored_lambda = srsly.pickle_loads(lambda_data)

print(f"Function result: {restored_func(3, 4)}")  # 22
print(f"Lambda result: {restored_lambda(5)}")  # 26
```

### Class and Complex Object Serialization

```python
import srsly

# Define a custom class
class DataProcessor:
    def __init__(self, multiplier=2):
        self.multiplier = multiplier
        self.history = []

    def process(self, value):
        result = value * self.multiplier
        self.history.append((value, result))
        return result

    def get_stats(self):
        if not self.history:
            return {"count": 0, "avg_input": 0, "avg_output": 0}

        inputs = [h[0] for h in self.history]
        outputs = [h[1] for h in self.history]
        return {
            "count": len(self.history),
            "avg_input": sum(inputs) / len(inputs),
            "avg_output": sum(outputs) / len(outputs)
        }

# Create and use processor
processor = DataProcessor(multiplier=3)
processor.process(10)
processor.process(20)
processor.process(15)

# Serialize the entire object with state
processor_data = srsly.pickle_dumps(processor)

# Deserialize and continue using
restored_processor = srsly.pickle_loads(processor_data)
print(f"Stats: {restored_processor.get_stats()}")
print(f"Processing 25: {restored_processor.process(25)}")
```

### Advanced Usage with Protocol Selection

```python
import srsly

# Complex nested data structure
complex_data = {
    "functions": {
        "add": lambda x, y: x + y,
        "multiply": lambda x, y: x * y
    },
    "data": list(range(1000)),
    "metadata": {"version": "1.0", "author": "system"}
}

# Use highest protocol for best compression
pickled_high = srsly.pickle_dumps(complex_data, protocol=-1)
print(f"High protocol size: {len(pickled_high)} bytes")

# Use specific protocol
pickled_v4 = srsly.pickle_dumps(complex_data, protocol=4)
print(f"Protocol 4 size: {len(pickled_v4)} bytes")

# Deserialize and test functions
restored = srsly.pickle_loads(pickled_high)
add_func = restored["functions"]["add"]
multiply_func = restored["functions"]["multiply"]

print(f"Add function: {add_func(5, 3)}")  # 8
print(f"Multiply function: {multiply_func(4, 7)}")  # 28
print(f"Data length: {len(restored['data'])}")  # 1000
```

### File-like Object Operations

```python
import srsly
import srsly.cloudpickle as cloudpickle
import io

# Serialize to file-like object
data = {"message": "Hello, world!", "numbers": [1, 2, 3]}
buffer = io.BytesIO()

# Use cloudpickle directly for file operations
cloudpickle.dump(data, buffer)

# Read back from buffer
buffer.seek(0)
loaded_data = cloudpickle.load(buffer)
print(f"Message: {loaded_data['message']}")

# Alternative: use the high-level functions
buffer = io.BytesIO()
pickled = srsly.pickle_dumps(data)
buffer.write(pickled)
buffer.seek(0)
restored = srsly.pickle_loads(buffer.read())
print(f"Numbers: {restored['numbers']}")
```