# ComputationalCost.py
# Benchmarks the computational cost (time and memory footprint) of the
# Adi et al. neural-network watermark embedder/decoder on a VGG16 model.
from NNW import Uchi_tools, Adi_tools
from utils import *
import psutil
import time



if __name__ == '__main__':
    ###### Reproducibility: fix RNG seeds so repeated runs are comparable
    torch.manual_seed(0)
    np.random.seed(0)

    # Tag of the pre-computed watermarking artifacts to load from disk.
    reload = 'vgg16_Adi'
    model = tv.models.vgg16()
    # Replace the ImageNet classifier head with a 10-class linear layer (CIFAR-10).
    model.classifier = nn.Linear(25088, 10)
    model.to(device)

    print("Initialization of the Neural Network Watermarking method: Adi et al. ")
    # watermarking section (change here to test another method) #######################################
    tools = Adi_tools()
    # watermarking section (END change here to test another method) ###################################
    # Watermark metadata saved by a previous embedding run (pickled dict inside a .npy).
    watermarking_dict = np.load(reload + '_watermarking_dict.npy', allow_pickle=True).item()

    print("##### Doing computational complexity evaluation")
    # NOTE: the embedder benchmark below is deliberately disabled — it performs a
    # full training step (CIFAR-10 loader + SGD), which is expensive. Uncomment to
    # re-measure embedding cost on the current hardware.
    # print("apply to: embedder")
    # trainset, testset, inference_transform = CIFAR10_dataset()
    # criterion = nn.CrossEntropyLoss()
    # batch_size = 128
    # learning_rate, momentum, weight_decay = 0.01, .9, 5e-4
    # trainloader, testloader = dataloader(trainset, testset, batch_size)
    # optimizer = optim.SGD([
    #     {'params': model.parameters()}
    # ], lr=learning_rate, momentum=momentum, weight_decay=weight_decay)
    # time_i = time.perf_counter()
    # tools.Embedder_one_step(model, trainloader, optimizer, criterion, watermarking_dict)
    # step_time = time.perf_counter() - time_i
    # print("Memory footprint to embed the watermark:", round(psutil.Process().memory_info().rss / (1024 ** 2)), " MB")
    # print("time of execution: %.5f s" % step_time)

    print("apply to: decoder")
    # Measure the decoder live instead of printing hard-coded constants:
    # perf_counter is a monotonic clock suited to interval timing (time.time is not),
    # and psutil reports this process's resident set size (RSS) after detection.
    time_i = time.perf_counter()
    tools.Detector(model, watermarking_dict)
    detection_time = time.perf_counter() - time_i
    memory_mb = psutil.Process().memory_info().rss / (1024 ** 2)
    print("Memory footprint to decode the watermark:", round(memory_mb), " MB")
    print("time of execution: %.5f s" % detection_time)