Commit 3f8ea5cb authored Apr 12, 2017 by Neal Wu

Fixes for differential_privacy

parent 0b1e767f
Showing 4 changed files with 38 additions and 38 deletions:

  differential_privacy/multiple_teachers/analysis.py        +3  -3
  differential_privacy/multiple_teachers/deep_cnn.py        +28 -28
  differential_privacy/multiple_teachers/input.py           +3  -3
  differential_privacy/multiple_teachers/train_teachers.py  +4  -4

Paired -/+ lines that read identically below appear to differ only in whitespace (trailing-space and indentation cleanup); the visible code changes are the gfile -> tf.gfile calls in input.py.
differential_privacy/multiple_teachers/analysis.py
@@ -216,10 +216,10 @@ def main(unused_argv):
   # If we are reproducing results from paper https://arxiv.org/abs/1610.05755,
   # download the required binaries with label information.
   ##################################################################

   # Binaries for MNIST results
   paper_binaries_mnist = \
-    ["https://github.com/npapernot/multiple-teachers-for-privacy/blob/master/mnist_250_teachers_labels.npy?raw=true",
+    ["https://github.com/npapernot/multiple-teachers-for-privacy/blob/master/mnist_250_teachers_labels.npy?raw=true",
     "https://github.com/npapernot/multiple-teachers-for-privacy/blob/master/mnist_250_teachers_100_indices_used_by_student.npy?raw=true"]
   if FLAGS.counts_file == "mnist_250_teachers_labels.npy" \
     or FLAGS.indices_file == "mnist_250_teachers_100_indices_used_by_student.npy":
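The two binaries are NumPy arrays of teacher-assigned labels and student query indices from the paper. A hypothetical usage sketch (not part of this commit), relying on maybe_download from input.py, which per its docstring returns the local filepaths corresponding to the files given as input:

    import numpy as np
    import input  # differential_privacy/multiple_teachers/input.py

    # Fetch the paper's MNIST binaries; the destination directory is illustrative.
    paths = input.maybe_download(paper_binaries_mnist, '/tmp/pate_mnist')
    teacher_labels = np.load(paths[0])   # labels from the 250 MNIST teachers
    student_indices = np.load(paths[1])  # indices of queries used by the student
    print(teacher_labels.shape, student_indices.shape)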
@@ -254,7 +254,7 @@ def main(unused_argv):
   total_log_mgf_nm = np.array([0.0 for _ in l_list])
   total_ss_nm = np.array([0.0 for _ in l_list])
   noise_eps = FLAGS.noise_eps

   for i in indices:
     total_log_mgf_nm += np.array([logmgf_from_counts(counts_mat[i], noise_eps, l)
     ...
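The loop above accumulates, for each moment order l in l_list, the log moment-generating function of the privacy loss over the selected queries. A minimal sketch (not code from this repo) of the standard conversion from accumulated log-MGFs to an (epsilon, delta) bound used by the paper's moments accountant:

    import numpy as np

    def eps_from_log_mgf(total_log_mgf, l_list, delta=1e-5):
      """Smallest epsilon implied by the accumulated log-MGF at each order l."""
      l_arr = np.asarray(l_list, dtype=float)
      # Standard tail bound: eps = min over l of (log_mgf(l) + log(1/delta)) / l.
      bounds = (np.asarray(total_log_mgf) + np.log(1.0 / delta)) / l_arr
      return float(np.min(bounds))

    # Example: accumulated log-MGFs at moment orders 1 and 2 after all queries.
    print(eps_from_log_mgf(total_log_mgf=[2.0, 5.0], l_list=[1.0, 2.0]))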
differential_privacy/multiple_teachers/deep_cnn.py
@@ -95,9 +95,9 @@ def inference(images, dropout=False):
   # conv1
   with tf.variable_scope('conv1') as scope:
-    kernel = _variable_with_weight_decay('weights',
+    kernel = _variable_with_weight_decay('weights',
                                          shape=first_conv_shape,
-                                         stddev=1e-4,
+                                         stddev=1e-4,
                                          wd=0.0)
     conv = tf.nn.conv2d(images, kernel, [1, 1, 1, 1], padding='SAME')
     biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.0))
@@ -108,25 +108,25 @@ def inference(images, dropout=False):
   # pool1
-  pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1],
+  pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1],
                          strides=[1, 2, 2, 1],
-                         padding='SAME',
+                         padding='SAME',
                          name='pool1')

   # norm1
-  norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0,
+  norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0,
                     beta=0.75, name='norm1')

   # conv2
   with tf.variable_scope('conv2') as scope:
-    kernel = _variable_with_weight_decay('weights',
+    kernel = _variable_with_weight_decay('weights',
                                          shape=[5, 5, 64, 128],
-                                         stddev=1e-4,
+                                         stddev=1e-4,
                                          wd=0.0)
     conv = tf.nn.conv2d(norm1, kernel, [1, 1, 1, 1], padding='SAME')
     biases = _variable_on_cpu('biases', [128], tf.constant_initializer(0.1))
@@ -137,18 +137,18 @@ def inference(images, dropout=False):
   # norm2
-  norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0,
+  norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0,
                     beta=0.75, name='norm2')

   # pool2
-  pool2 = tf.nn.max_pool(norm2,
+  pool2 = tf.nn.max_pool(norm2,
                          ksize=[1, 3, 3, 1],
-                         strides=[1, 2, 2, 1], padding='SAME',
+                         strides=[1, 2, 2, 1], padding='SAME',
                          name='pool2')

   # local3
@@ -156,9 +156,9 @@ def inference(images, dropout=False):
     # Move everything into depth so we can perform a single matrix multiply.
     reshape = tf.reshape(pool2, [FLAGS.batch_size, -1])
     dim = reshape.get_shape()[1].value
-    weights = _variable_with_weight_decay('weights',
+    weights = _variable_with_weight_decay('weights',
                                           shape=[dim, 384],
-                                          stddev=0.04,
+                                          stddev=0.04,
                                           wd=0.004)
     biases = _variable_on_cpu('biases', [384], tf.constant_initializer(0.1))
     local3 = tf.nn.relu(tf.matmul(reshape, weights) + biases, name=scope.name)
@@ -167,9 +167,9 @@ def inference(images, dropout=False):
   # local4
   with tf.variable_scope('local4') as scope:
-    weights = _variable_with_weight_decay('weights',
+    weights = _variable_with_weight_decay('weights',
                                           shape=[384, 192],
-                                          stddev=0.04,
+                                          stddev=0.04,
                                           wd=0.004)
     biases = _variable_on_cpu('biases', [192], tf.constant_initializer(0.1))
     local4 = tf.nn.relu(tf.matmul(local3, weights) + biases, name=scope.name)
@@ -178,11 +178,11 @@ def inference(images, dropout=False):
   # compute logits
   with tf.variable_scope('softmax_linear') as scope:
-    weights = _variable_with_weight_decay('weights',
+    weights = _variable_with_weight_decay('weights',
                                           [192, FLAGS.nb_labels],
-                                          stddev=1/192.0,
+                                          stddev=1/192.0,
                                           wd=0.0)
-    biases = _variable_on_cpu('biases',
+    biases = _variable_on_cpu('biases',
                               [FLAGS.nb_labels],
                               tf.constant_initializer(0.0))
     logits = tf.add(tf.matmul(local4, weights), biases, name=scope.name)
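All six deep_cnn.py hunks above touch the same inference() function. For orientation, a condensed sketch of the layer stack they walk through, assuming TensorFlow 1.x; tf.get_variable stands in for the repo's _variable_with_weight_decay/_variable_on_cpu helpers, the ReLU placement follows the elided context, and the input channel count is an assumption:

    import tensorflow as tf

    def inference_sketch(images, batch_size=128, nb_labels=10):
      """Condensed stand-in for deep_cnn.py's inference(); TF 1.x API."""
      # conv1 (input channel count of 3 is an assumption; MNIST would use 1)
      with tf.variable_scope('conv1'):
        kernel = tf.get_variable(
            'weights', [5, 5, 3, 64],
            initializer=tf.truncated_normal_initializer(stddev=1e-4))
        biases = tf.get_variable('biases', [64],
                                 initializer=tf.constant_initializer(0.0))
        conv1 = tf.nn.relu(
            tf.nn.conv2d(images, kernel, [1, 1, 1, 1], padding='SAME') + biases)
      pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1],
                             padding='SAME', name='pool1')
      norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
                        name='norm1')
      with tf.variable_scope('conv2'):
        kernel = tf.get_variable(
            'weights', [5, 5, 64, 128],
            initializer=tf.truncated_normal_initializer(stddev=1e-4))
        biases = tf.get_variable('biases', [128],
                                 initializer=tf.constant_initializer(0.1))
        conv2 = tf.nn.relu(
            tf.nn.conv2d(norm1, kernel, [1, 1, 1, 1], padding='SAME') + biases)
      norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
                        name='norm2')
      pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1],
                             padding='SAME', name='pool2')
      # Flatten; requires images to have a fully defined static shape.
      reshape = tf.reshape(pool2, [batch_size, -1])
      dim = reshape.get_shape()[1].value
      with tf.variable_scope('local3'):
        weights = tf.get_variable(
            'weights', [dim, 384],
            initializer=tf.truncated_normal_initializer(stddev=0.04))
        biases = tf.get_variable('biases', [384],
                                 initializer=tf.constant_initializer(0.1))
        local3 = tf.nn.relu(tf.matmul(reshape, weights) + biases)
      with tf.variable_scope('local4'):
        weights = tf.get_variable(
            'weights', [384, 192],
            initializer=tf.truncated_normal_initializer(stddev=0.04))
        biases = tf.get_variable('biases', [192],
                                 initializer=tf.constant_initializer(0.1))
        local4 = tf.nn.relu(tf.matmul(local3, weights) + biases)
      with tf.variable_scope('softmax_linear'):
        weights = tf.get_variable(
            'weights', [192, nb_labels],
            initializer=tf.truncated_normal_initializer(stddev=1 / 192.0))
        biases = tf.get_variable('biases', [nb_labels],
                                 initializer=tf.constant_initializer(0.0))
        logits = tf.add(tf.matmul(local4, weights), biases, name='logits')
      return logits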
@@ -386,7 +386,7 @@ def train_op_fun(total_loss, global_step):
"""
# Variables that affect learning rate.
nb_ex_per_train_epoch
=
int
(
60000
/
FLAGS
.
nb_teachers
)
num_batches_per_epoch
=
nb_ex_per_train_epoch
/
FLAGS
.
batch_size
decay_steps
=
int
(
num_batches_per_epoch
*
FLAGS
.
epochs_per_decay
)
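For reference on how a decay_steps value like the one computed above is typically consumed, a minimal sketch assuming TensorFlow 1.x and illustrative flag values (not the repo's defaults):

    import tensorflow as tf

    nb_teachers, batch_size, epochs_per_decay = 250, 128, 350.0  # illustrative
    nb_ex_per_train_epoch = int(60000 / nb_teachers)  # 240 examples per teacher
    num_batches_per_epoch = nb_ex_per_train_epoch / float(batch_size)
    decay_steps = int(num_batches_per_epoch * epochs_per_decay)  # 656 here

    global_step = tf.Variable(0, trainable=False, name='global_step')
    # Decay the learning rate by a fixed factor every decay_steps batches.
    learning_rate = tf.train.exponential_decay(
        0.1,           # initial learning rate (illustrative)
        global_step,
        decay_steps,
        0.1,           # decay factor (illustrative)
        staircase=True)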
differential_privacy/multiple_teachers/input.py
@@ -47,7 +47,7 @@ def create_dir_if_needed(dest_directory):
 def maybe_download(file_urls, directory):
   """
   Download a set of files in temporary local folder
-  :param directory: the directory where to download
+  :param directory: the directory where to download
   :return: a tuple of filepaths corresponding to the files given as input
   """

   # Create directory if doesn't exist
@@ -73,7 +73,7 @@ def maybe_download(file_urls, directory):
     result.append(filepath)

     # Test if file already exists
-    if not gfile.Exists(filepath):
+    if not tf.gfile.Exists(filepath):
       def _progress(count, block_size, total_size):
         sys.stdout.write('\r>> Downloading %s %.1f%%' % (filename,
             float(count * block_size) / float(total_size) * 100.0))
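A self-contained sketch of this hunk's download-if-missing pattern, written for Python 3's urllib (the original is Python 2) and reusing the tf.gfile.Exists check; the filename parsing for GitHub's ?raw=true URLs is an assumption:

    import os
    import sys
    import urllib.request
    import tensorflow as tf

    def fetch_if_missing(url, directory):
      """Download url into directory unless the file is already there."""
      filename = url.split('/')[-1].split('?')[0]
      filepath = os.path.join(directory, filename)
      os.makedirs(directory, exist_ok=True)
      if not tf.gfile.Exists(filepath):
        def _progress(count, block_size, total_size):
          # urlretrieve calls this hook after each block is received.
          sys.stdout.write('\r>> Downloading %s %.1f%%' %
                           (filename,
                            float(count * block_size) / float(total_size) * 100.0))
          sys.stdout.flush()
        urllib.request.urlretrieve(url, filepath, _progress)
        print()
      return filepath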
@@ -124,7 +124,7 @@ def extract_svhn(local_url):
   :return:
   """

-  with gfile.Open(local_url, mode='r') as file_obj:
+  with tf.gfile.Open(local_url, mode='r') as file_obj:
     # Load MATLAB matrix using scipy IO
     dict = loadmat(file_obj)
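loadmat here is scipy.io.loadmat. A sketch of consuming the cropped SVHN .mat files, assuming the standard train_32x32.mat layout ('X' of shape (32, 32, 3, N), labels 'y' in 1..10 with 10 encoding the digit 0):

    import numpy as np
    from scipy.io import loadmat

    svhn = loadmat('train_32x32.mat')      # illustrative local path
    images = np.moveaxis(svhn['X'], 3, 0)  # (32, 32, 3, N) -> (N, 32, 32, 3)
    labels = svhn['y'].reshape(-1) % 10    # map label 10 to digit 0
    print(images.shape, labels[:10])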
differential_privacy/multiple_teachers/train_teachers.py
@@ -64,11 +64,11 @@ def train_teacher(dataset, nb_teachers, teacher_id):
   else:
     print("Check value of dataset flag")
     return False

   # Retrieve subset of data for this teacher
-  data, labels = input.partition_dataset(train_data, train_labels, nb_teachers,
+  data, labels = input.partition_dataset(train_data, train_labels, nb_teachers,
                                          teacher_id)

   print("Length of training data: " + str(len(labels)))
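partition_dataset (from input.py) hands each teacher a disjoint shard of the training set. A minimal sketch of that kind of contiguous split; the real helper may differ in details such as argument validation:

    import numpy as np

    def partition_sketch(data, labels, nb_teachers, teacher_id):
      """Return the teacher_id-th of nb_teachers equal, contiguous shards."""
      assert 0 <= teacher_id < nb_teachers
      batch_len = len(data) // nb_teachers
      start = teacher_id * batch_len
      end = start + batch_len
      return data[start:end], labels[start:end]

    # Example: 10 samples split across 5 teachers; teacher 2 gets samples 4-5.
    data, labels = np.arange(10), np.arange(10)
    print(partition_sketch(data, labels, nb_teachers=5, teacher_id=2))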