blob.py
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 17 14:12:46 2017

@author: shiwu_001
"""
from config import DTYPE, USE_GPU

import pycuda.gpuarray as garr
from numpy import zeros


class Blob(object):
    def __init__(self, blob, copy=False):
        self.hold_ = copy
        self.blob_ = None
        self.count = blob.count
        self.shape = blob.shape
        if copy:
            # Only used when the blob holds data by itself,
            # to save unnecessary copies.
            self.last_data_cpu_ = True
            self.last_diff_cpu_ = True
            # Lazy scheme: allocate memory on first access.
            # self.data_ = zeros(blob.shape, dtype=DTYPE)
            # self.diff_ = zeros(blob.shape, dtype=DTYPE)
            self.data_ = None
            self.diff_ = None
            if USE_GPU:
                # self.gpu_data_ = garr.zeros(shape=blob.shape, dtype=DTYPE)
                # self.gpu_diff_ = garr.zeros(shape=blob.shape, dtype=DTYPE)
                self.gpu_data_ = None
                self.gpu_diff_ = None
        else:
            # Wrap an existing blob; the device arrays alias its memory.
            self.blob_ = blob
            self.data_ = None
            self.diff_ = None
            if USE_GPU:
                self.gpu_data_ = garr.GPUArray(shape=blob.shape, dtype=DTYPE,
                                               gpudata=blob.gpu_data_ptr)
                self.gpu_diff_ = garr.GPUArray(shape=blob.shape, dtype=DTYPE,
                                               gpudata=blob.gpu_diff_ptr)
    @property
    def data(self):
        if self.hold_:
            if self.data_ is None:
                self.data_ = zeros(self.shape, dtype=DTYPE)
            if not self.last_data_cpu_:
                # The device copy is newer; pull it back to the host.
                self.gpu_data_.get(self.data_)
            self.last_data_cpu_ = True
        else:
            self.data_ = self.blob_.data
        return self.data_
    @property
    def gpu_data(self):
        if self.hold_:
            if self.gpu_data_ is None:
                self.gpu_data_ = garr.zeros(shape=self.shape, dtype=DTYPE)
            if self.last_data_cpu_ and self.data_ is not None:
                # The host copy is newer; push it to the device.
                self.gpu_data_.set(self.data_)
            self.last_data_cpu_ = False
        else:
            # Touch gpu_data_ptr to make the wrapped blob update
            # its data on the device.
            self.blob_.gpu_data_ptr
        return self.gpu_data_
    @property
    def diff(self):
        if self.hold_:
            if self.diff_ is None:
                self.diff_ = zeros(self.shape, dtype=DTYPE)
            if not self.last_diff_cpu_:
                self.gpu_diff_.get(self.diff_)
            self.last_diff_cpu_ = True
        else:
            self.diff_ = self.blob_.diff
        return self.diff_
    @property
    def gpu_diff(self):
        if self.hold_:
            if self.gpu_diff_ is None:
                self.gpu_diff_ = garr.zeros(shape=self.shape, dtype=DTYPE)
            if self.last_diff_cpu_ and self.diff_ is not None:
                self.gpu_diff_.set(self.diff_)
            self.last_diff_cpu_ = False
        else:
            # Touch gpu_diff_ptr to make the wrapped blob update
            # its diff on the device.
            self.blob_.gpu_diff_ptr
        return self.gpu_diff_
    def share_data(self, other):
        if not (self.hold_ or other.hold_):
            self.blob_.share_data(other.blob_)
            self.data_ = None
            if USE_GPU:
                # Rebind the device array to the newly shared memory.
                self.gpu_data_ = garr.GPUArray(shape=self.shape, dtype=DTYPE,
                                               gpudata=self.blob_.gpu_data_ptr)
        else:
            raise ValueError("Can't share data from or to a copied blob.")

    def share_diff(self, other):
        if not (self.hold_ or other.hold_):
            self.blob_.share_diff(other.blob_)
            self.diff_ = None
            if USE_GPU:
                # Rebind the device array to the newly shared memory.
                self.gpu_diff_ = garr.GPUArray(shape=self.shape, dtype=DTYPE,
                                               gpudata=self.blob_.gpu_diff_ptr)
        else:
            raise ValueError("Can't share diff from or to a copied blob.")