Skip to content

Commit

Permalink
add more data
Browse files Browse the repository at this point in the history
  • Loading branch information
theyoucheng committed Mar 11, 2018
1 parent 4f13d4d commit 9e24a41
Show file tree
Hide file tree
Showing 34 changed files with 150,442 additions and 99 deletions.
115 changes: 115 additions & 0 deletions exps/exp5-3/SSC-top-weights/exp-ssc-k.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
import sys
sys.path.insert(0, '../../../src/')
import random
import numpy as np
import json
import os
from datetime import datetime

from util import *
from nnett import *
from lp import *

def ssc_pair(nnet, I, J, K, test_data, di):
    """Try to activate the SSC pair (I, J, K) of `nnet` with an LP query.

    Walks the test images in a random order; for each image it asks
    `rp_ssc` for a perturbed input that flips the sign condition for the
    pair.  Stops at the first feasible LP solution, or after 41 images.

    Returns a 7-tuple:
      (found, index, cex, d, label, label_, label__)
    where `found` says whether a feasible perturbation was produced,
    `index` is how many images were tried, `cex` flags a counterexample
    (the two predictions disagree and one of them equals the true label),
    `d` is the LP distance, and the last three are the true label and the
    predictions on the original and perturbed inputs (-1s when not found).
    `di` is accepted for interface compatibility but unused here.
    """
    num_images = len(test_data[0].eval())
    order = list(range(num_images))
    np.random.shuffle(order)

    is_cex = False
    attempt = -1  # preserved in the return value even if there are no images

    for attempt, image_id in enumerate(order):
        x = test_data[0][image_id].eval()
        truth = test_data[1][image_id].eval()

        pred_orig, act = nnet.eval(list(x))

        feasible, x_new, dist, _, _ = rp_ssc(I, J, K, nnet, x, act)

        if feasible:
            pred_new, act = nnet.eval(list(x_new))
            # Counterexample: the perturbation changed the prediction and
            # one of the two predictions matches the ground-truth label.
            if pred_orig != pred_new and (pred_orig == truth or pred_new == truth):
                is_cex = True
            return True, attempt, is_cex, dist, truth, pred_orig, pred_new

        if attempt >= 40:  # give up after 41 images
            break

    return False, attempt, is_cex, -1, -1, -1, -1

def main():
    """Run SSC-pair testing restricted to the top-kappa incoming weights.

    For every random network listed in README.txt, and for every node K of
    each hidden layer, only the kappa largest-magnitude incoming weights
    are tested with `ssc_pair`.  Per-network details go to a timestamped
    output directory; a one-line summary per network is appended to
    results-ssc-kappa<kappa>.txt in the network directory.
    """
    kappa = 10
    di = '../../random-nn/'
    outs = "./ssc-pairs" + str(datetime.now()).replace(' ', '-') + '/'
    os.system('mkdir -p {0}'.format(outs))
    training_data, validation_data, test_data = mnist_load_data_shared(
        filename="../../data/mnist.pkl.gz")

    with open(di + 'README.txt') as f:
        lines = f.readlines()

    for line in lines:
        fname = line.split()[0]
        with open(di + 'w_' + fname, "r") as infile:
            weights = json.load(infile)
        with open(di + 'b_' + fname, "r") as infile:
            biases = json.load(infile)

        nnet = NNett(weights, biases)
        N = len(nnet.weights)

        fres = fname + '-results.txt'
        with open(outs + fres, "a") as f:
            f.write('Neural net tested: {0}\n'.format(fname))

        ncex = 0
        covered = 0
        not_covered = 0
        i_begin = 2
        k_begin = 0
        for I in range(i_begin, N):  # iterate each hidden layer
            for K in range(k_begin, len(nnet.weights[I - 1][0])):
                # Magnitudes of the weights feeding node K of layer I.
                weights_to_k = [abs(nnet.weights[I - 1][J][K])
                                for J in range(len(nnet.weights[I - 1]))]
                # BUG FIX: the original popped elements out of weights_to_k
                # while collecting argmax indices, so every index after the
                # first referred to the shrunken list and was shifted.
                # Rank indices by weight magnitude instead (no mutation);
                # slicing also tolerates kappa > len(weights_to_k).
                ranked = sorted(range(len(weights_to_k)),
                                key=lambda j: weights_to_k[j], reverse=True)
                top_kappa = ranked[:kappa]

                for J in top_kappa:
                    found, tested, cex, d, label, label_, label__ = ssc_pair(
                        nnet, I - 1, J, K, test_data, outs)
                    if found: covered += 1
                    else: not_covered += 1
                    if cex: ncex += 1
                    s = 'I-J-K: {0}-{1}-{2}, '.format(I - 1, J, K)
                    s += ('{0}, tested images: {1}, cex={9}, ncex={2}, '
                          'covered={3}, not_covered={4}, d={5}, {6}:{7}-{8}\n'
                          ).format(found, tested, ncex, covered, not_covered,
                                   d, label, label_, label__, cex)
                    with open(outs + fres, "a") as f:
                        f.write(s)
            k_begin = 0

        tot_pairs = covered + not_covered
        # Guard against a degenerate net with no tested pairs.
        cov_ratio = 1.0 * covered / tot_pairs if tot_pairs else 0.0
        cex_ratio = 1.0 * ncex / tot_pairs if tot_pairs else 0.0
        # BUG FIX: label was 'aac-coverage' but the committed result files
        # (and this experiment's name) use 'ssc-coverage'.
        s = ('{0}: ssc-coverage: {1}, CEX\%={2}, #CEX={3}, tot_pairs={4}, '
             'covered={5}, not-covered={6}\n').format(
            fname, cov_ratio, cex_ratio, ncex, tot_pairs, covered, not_covered)
        # BUG FIX: this summary file was opened but never closed.
        with open(di + 'results-ssc-kappa{0}.txt'.format(kappa), "a") as f:
            f.write(s)

# Script entry point: run the top-kappa SSC experiment when executed directly.
if __name__=="__main__":
    main()
10 changes: 10 additions & 0 deletions exps/exp5-3/SSC-top-weights/results-kappa10.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
mnist_nnet_index0-67-22-63.txt: ssc-coverage: 0.998947368421, CEX\%=0.176842105263, #CEX=168, tot_pairs=950, covered=949, not-covered=1
mnist_nnet_index1-59-94-56-45.txt: ssc-coverage: 0.993658536585, CEX\%=0.0751219512195, #CEX=154, tot_pairs=2050, covered=2037, not-covered=13
mnist_nnet_index2-72-61-70-77.txt: ssc-coverage: 0.995412844037, CEX\%=0.0802752293578, #CEX=175, tot_pairs=2180, covered=2170, not-covered=10
mnist_nnet_index3-65-99-87-23-31.txt: ssc-coverage: 0.9636, CEX\%=0.0652, #CEX=163, tot_pairs=2500, covered=2409, not-covered=91
mnist_nnet_index4-49-61-90-21-48.txt: ssc-coverage: 0.921739130435, CEX\%=0.0839130434783, #CEX=193, tot_pairs=2300, covered=2120, not-covered=180
mnist_nnet_index5-97-83-32.txt: ssc-coverage: 1.0, CEX\%=0.064, #CEX=80, tot_pairs=1250, covered=1250, not-covered=0
mnist_nnet_index6-33-95-67-43-76.txt: ssc-coverage: 0.891065292096, CEX\%=0.0718213058419, #CEX=209, tot_pairs=2910, covered=2593, not-covered=317
mnist_nnet_index7-78-62-73-47.txt: ssc-coverage: 0.9984375, CEX\%=0.0885416666667, #CEX=170, tot_pairs=1920, covered=1917, not-covered=3
mnist_nnet_index8-87-33-62.txt: ssc-coverage: 1.0, CEX\%=0.127619047619, #CEX=134, tot_pairs=1050, covered=1050, not-covered=0
mnist_nnet_index9-76-55-74-98-75.txt: ssc-coverage: 0.899358974359, CEX\%=0.0641025641026, #CEX=200, tot_pairs=3120, covered=2806, not-covered=314
135 changes: 135 additions & 0 deletions exps/exp5-3/lp-call-runtime/exp-get-runtime.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@

import sys
sys.path.insert(0, '../../../src/')
import random
import numpy as np
import json
import os
import time
from datetime import datetime

from util import *
from nnett import *
from lp import *


def ssc_pair(nnet, I, J, K, test_data, di):
    """Activate the SSC pair (I, J, K) and record the LP call runtime.

    Walks the test images in a random order; for each image it asks
    `rp_ssc` for a perturbed input flipping the sign condition for the
    pair.  When a feasible solution is found, the same LP call is repeated
    99 more times and the mean wall-clock time (plus the LP size, s1 vars
    and s2 constraints) is appended to <di>/results.txt.

    Returns (found, index, cex, d, label, label_, label__); the last four
    are -1 when no feasible solution was found within 41 images.
    """
    tot = len(test_data[0].eval())
    ordering = list(range(tot))
    np.random.shuffle(ordering)

    cex = False
    index = -1

    while index < tot - 1:
        index += 1

        X = test_data[0][ordering[index]].eval()
        label = test_data[1][ordering[index]].eval()

        label_, act = nnet.eval(list(X))

        times = []
        start = time.time()
        feasible, new_x, d, s1, s2 = rp_ssc(I, J, K, nnet, X, act)
        times.append(time.time() - start)

        if feasible:
            label__, act = nnet.eval(list(new_x))
            # Counterexample: predictions disagree and one matches the truth.
            if (label == label_ or label == label__) and label_ != label__:
                cex = True

            # Re-solve the same LP 99 more times to average out timing noise.
            for _ in range(99):
                start = time.time()
                feasible, new_x, d, s1, s2 = rp_ssc(I, J, K, nnet, X, act)
                times.append(time.time() - start)

            mean_time = 1.0 * sum(times) / len(times)

            # BUG FIX: the original did 'results.txt'.format(label) — a
            # no-op format call on a string with no placeholders — and
            # never used a with-block for the file handle.
            with open(di + 'results.txt', "a") as f:
                f.write('#vars: {0}, #constraints: {1}, #time: {2}\n'.format(
                    s1, s2, mean_time))

            return True, index, cex, d, label, label_, label__

        if index >= 40:  # give up after 41 images
            break

    return False, index, cex, -1, -1, -1, -1

def main():
    """Collect LP-call runtimes over SSC pairs of the random networks.

    For each network (skipping the first 7 — a hard-coded resume point from
    the original experiment), and for each adjacent layer pair, pairs are
    tested until the first covered one; its runtime line is written to
    ./results.txt by `ssc_pair`, then the search moves to the next layer.
    """
    di = '../../random-nn/'
    training_data, validation_data, test_data = mnist_load_data_shared(
        filename="../../data/mnist.pkl.gz")

    with open(di + 'README.txt') as f:
        lines = f.readlines()

    nnindex = -1
    for line in lines:
        nnindex += 1
        # NOTE(review): hard-coded resume point — only networks 7+ are run.
        if nnindex < 7: continue

        fname = line.split()[0]
        with open(di + 'w_' + fname, "r") as infile:
            weights = json.load(infile)
        with open(di + 'b_' + fname, "r") as infile:
            biases = json.load(infile)

        nnet = NNett(weights, biases)
        N = len(nnet.weights)

        # BUG FIX: these result-file handles were opened and written but
        # never closed; with-blocks guarantee the data is flushed.
        with open('./results.txt', "a") as f:
            f.write('Neural net tested: {0}\n'.format(fname))

        ncex = 0
        covered = 0
        not_covered = 0
        i_begin = 1
        j_begin = 0
        k_begin = 0
        flag = False
        for I in range(i_begin, N - 1):  # iterate each hidden layer
            M = len(nnet.weights[I - 1][0])
            with open('./results.txt', "a") as f:
                f.write('L{0}-{1}: '.format(I, I + 1))
            for J in range(j_begin, M):
                for K in range(k_begin, len(nnet.weights[I][0])):
                    # flag stays True only if this pair is covered; the
                    # breaks below then stop after one sample per layer.
                    flag = True
                    found, tested, cex, d, label, label_, label__ = ssc_pair(
                        nnet, I, J, K, test_data, './')
                    if found: covered += 1
                    else:
                        not_covered += 1
                        flag = False
                    if cex: ncex += 1
                    if flag: break
                k_begin = 0
                if flag: break
            j_begin = 0

# Script entry point: run the LP-runtime measurement when executed directly.
if __name__=="__main__":
    main()
Loading

0 comments on commit 9e24a41

Please sign in to comment.