import torch
import torch.nn.functional as F

class ContrastiveLoss(torch.nn.Module):
    # The opening lines of this listing were cut off by the page break; the
    # class and constructor headers are restored to match the usage in Train.py.
    # The default margin value is not shown in the original; 2.0 is assumed.
    def __init__(self, margin=2.0):
        super(ContrastiveLoss, self).__init__()
        self.margin = margin

    def forward(self, output1, output2, label):
        euclidean_distance = F.pairwise_distance(output1, output2)
        loss_contrastive = torch.mean(
            (1 - label) * torch.pow(euclidean_distance, 2)
            + label * torch.pow(torch.clamp(self.margin - euclidean_distance, min=0.0), 2))
        return loss_contrastive
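For reference, the forward pass above computes the contrastive loss over a batch of N pairs:

$$\mathcal{L} = \frac{1}{N}\sum_{i=1}^{N}\Big[(1 - y_i)\,d_i^{2} + y_i \max(0,\; m - d_i)^{2}\Big],$$

where $d_i$ is the pairwise Euclidean distance between the two embeddings, $y_i$ is the pair label, and $m$ is the margin.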
Dataloaders.py

from torch.utils.data import Dataset
from Preprocessing import invert_image_path, convert_to_image_tensor
from sklearn.model_selection import train_test_split
import pickle
from random import randrange

base_path_org = 'Datasets/cedar1/full_org/original_%d_%d.png'
base_path_forg = 'Datasets/cedar1/full_forg/forgeries_%d_%d.png'

def fix_pair(x, y):
    # resample until the two signature indices differ
    if x == y:
        return fix_pair(x, randrange(1, 24))
    else:
        return x, y

data = []
n_samples_of_each_class = 13500

for _ in range(n_samples_of_each_class):
    anchor_person = randrange(1, 55)
    anchor_sign = randrange(1, 24)
    pos_sign = randrange(1, 24)
    anchor_sign, pos_sign = fix_pair(anchor_sign, pos_sign)
    neg_sign = randrange(1, 24)
    positive = [base_path_org % (anchor_person, anchor_sign),
                base_path_org % (anchor_person, pos_sign), 1]
    negative = [base_path_org % (anchor_person, anchor_sign),
                base_path_forg % (anchor_person, neg_sign), 0]
    data.append(positive)
    data.append(negative)

train, test = train_test_split(data, test_size=0.15)

with open('train_index.pkl', 'wb') as train_index_file:
    pickle.dump(train, train_index_file)
with open('test_index.pkl', 'wb') as test_index_file:
    pickle.dump(test, test_index_file)

class TrainDataset(Dataset):
    def __init__(self):
        with open('train_index.pkl', 'rb') as train_index_file:
            self.pairs = pickle.load(train_index_file)

    def __getitem__(self, index):
        item = self.pairs[index]
        X = convert_to_image_tensor(invert_image_path(item[0]))
        Y = convert_to_image_tensor(invert_image_path(item[1]))
        return [X, Y, item[2]]
    def __len__(self):
        return len(self.pairs)

class TestDataset(Dataset):
    def __init__(self):
        with open('test_index.pkl', 'rb') as test_index_file:
            self.pairs = pickle.load(test_index_file)

    def __getitem__(self, index):
        item = self.pairs[index]
        X = convert_to_image_tensor(invert_image_path(item[0]))
        Y = convert_to_image_tensor(invert_image_path(item[1]))
        return [X, Y, item[2]]

    def __len__(self):
        return len(self.pairs)
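A minimal sanity check for the serialized indices (a sketch, not part of the original project; it assumes the pickling step above has already run):

    import pickle

    with open('train_index.pkl', 'rb') as f:
        train_pairs = pickle.load(f)
    n_genuine = sum(1 for _, _, label in train_pairs if label == 1)
    print(len(train_pairs), 'training pairs,', n_genuine, 'with label 1 (genuine)')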
Test.py

from Model import SiameseConvNet, distance_metric
from torch import load
import torch
import numpy as np
from Dataloaders import TestDataset
from torch.utils.data import DataLoader

device = torch.device('cpu')
model = SiameseConvNet()
model.load_state_dict(load(open('Models/model_large_epoch_20', 'rb'), map_location=device))

def compute_accuracy_roc(predictions, labels):
    dmax = np.max(predictions)
    dmin = np.min(predictions)
    nsame = np.sum(labels == 1)
    ndiff = np.sum(labels == 0)
    step = 0.001
    max_acc = 0
    d_optimal = 0
    for d in np.arange(dmin, dmax + step, step):
        idx1 = predictions.ravel() <= d
        idx2 = predictions.ravel() > d
        tpr = float(np.sum(labels[idx1] == 1)) / nsame
        tnr = float(np.sum(labels[idx2] == 0)) / ndiff
        acc = 0.5 * (tpr + tnr)
        if acc > max_acc:
            max_acc = acc
            d_optimal = d
    return max_acc, d_optimal

batch_avg_acc = 0
batch_avg_d = 0
n_batch = 0

def test():
    model.eval()
    global batch_avg_acc, batch_avg_d, n_batch
    test_dataset = TestDataset()
    loader = DataLoader(test_dataset, batch_size=8, shuffle=True)
    for batch_index, data in enumerate(loader):
        A = data[0]
        B = data[1]
        labels = data[2].long()
        f_a, f_b = model.forward(A, B)
        dist = distance_metric(f_a, f_b)
        acc, d = compute_accuracy_roc(dist.detach().numpy(), labels.detach().numpy())
        print('Max accuracy for batch {} = {} at d = {}'.format(batch_index, acc, d))
        batch_avg_acc += acc
        batch_avg_d += d
        n_batch += 1

print('CEDAR1:')
test()
print('Avg acc across all batches={} at d={}'.format(batch_avg_acc / n_batch, batch_avg_d / n_batch))
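compute_accuracy_roc sweeps a distance threshold d in steps of 0.001 and returns the value that maximizes the balanced accuracy 0.5*(TPR + TNR), treating pairs with distance <= d as genuine. An illustration on synthetic distances (assumes the function above is in scope):

    import numpy as np

    preds = np.array([0.1, 0.2, 0.8, 0.9])  # pairwise distances
    labels = np.array([1, 1, 0, 0])         # 1 = genuine pair, 0 = forgery
    acc, d = compute_accuracy_roc(preds, labels)
    print(acc, d)  # 1.0 at a threshold of roughly 0.2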
Train.py

from torch import save
from torch.optim import Adam
from torch.utils.data import DataLoader
# The following imports are not shown in the original listing; SiameseConvNet
# and ContrastiveLoss are assumed to live in Model.py, matching Test.py.
from Model import SiameseConvNet, ContrastiveLoss
from Dataloaders import TrainDataset

model = SiameseConvNet()
criterion = ContrastiveLoss()
optimizer = Adam(model.parameters())
train_dataset = TrainDataset()
train_loader = DataLoader(train_dataset, batch_size=16, shuffle=True)

def checkpoint(epoch):
    file_path = "Models/model_epoch_%d" % epoch
    with open(file_path, 'wb') as f:
        save(model.state_dict(), f)

def train(epoch):
    total_loss = 0
    for batch_index, data in enumerate(train_loader):
        A = data[0]
        B = data[1]
        optimizer.zero_grad()
        label = data[2].float()
        f_A, f_B = model.forward(A, B)
        loss = criterion(f_A, f_B, label)
        total_loss += loss.item()
        print('Epoch {}, batch {}, loss={}'.format(epoch, batch_index, loss.item()))
        loss.backward()
        optimizer.step()
    print('Average epoch loss={}'.format(total_loss / (len(train_dataset) // 16)))

for e in range(1, 21):
    train(e)
    checkpoint(e)
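Since checkpoint() saves a state_dict after every epoch, training can also be resumed from any saved epoch; a hypothetical helper, not in the original listing:

    from torch import load

    def resume(epoch):
        with open("Models/model_epoch_%d" % epoch, 'rb') as f:
            model.load_state_dict(load(f))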
main.py

import numpy
from flask import Flask, request, render_template, send_from_directory, jsonify
import sqlite3
from PIL import Image
from Preprocessing import convert_to_image_tensor, invert_image
import torch
from Model import SiameseConvNet, distance_metric
from io import BytesIO
import json
import math
import sigex.signature_extractor.sample_project.Sigex as preproc
import cv2

app = Flask(__name__, static_folder='./frontend/build/static',
            template_folder='./frontend/build')

def load_model():
    device = torch.device('cpu')
    model = SiameseConvNet().eval()
    model.load_state_dict(torch.load('Models/model_large_epoch_20', map_location=device))
    return model

def connect_to_db():
    conn = sqlite3.connect('user_signatures.db')
    return conn

def get_file_from_db(customer_id):
    cursor = connect_to_db().cursor()
    select_fname = """SELECT sign1,sign2,sign3 from signatures where customer_id = ?"""
    cursor.execute(select_fname, (customer_id,))
    item = cursor.fetchone()
    cursor.connection.commit()
    return item

def main():
    CREATE_TABLE = """CREATE TABLE IF NOT EXISTS signatures
        (customer_id TEXT PRIMARY KEY, sign1 BLOB, sign2 BLOB, sign3 BLOB)"""
    cursor = connect_to_db().cursor()
    cursor.execute(CREATE_TABLE)
    cursor.connection.commit()
    # For Heroku, remove this line. We'll use gunicorn to run the app
    app.run()
    # app.run(debug=True)

@app.route('/')
def index():
    return render_template('index.html')

@app.route('/upload', methods=['POST'])
def upload():
    file1 = request.files['uploadedImage1']
    file2 = request.files['uploadedImage2']
    file3 = request.files['uploadedImage3']
    customer_id = request.form['customerID']
    print(customer_id)
    try:
        conn = connect_to_db()
        cursor = conn.cursor()
        query = """DELETE FROM signatures where customer_id=?"""
        cursor.execute(query, (customer_id,))
        cursor = conn.cursor()
        query = """INSERT INTO signatures VALUES(?,?,?,?)"""
        cursor.execute(query, (customer_id, file1.read(), file2.read(), file3.read()))
        conn.commit()
        return jsonify({"error": False})
    except Exception as e:
        print(e)
        return jsonify({"error": True})

@app.route('/verify', methods=['POST'])
def verify():
    try:
        customer_id = request.form['customerID']
        input_image = Image.open(request.files['newSignature']).convert('RGB')
        # PIL.Image to CV image
        input_image = numpy.array(input_image)
        input_image = input_image[:, :, ::-1].copy()
        input_image = preproc.SignatureExtraction(input_image)
        # CV image back to PIL.Image
        input_image = cv2.cvtColor(input_image, cv2.COLOR_BGR2RGB)
        input_image = Image.fromarray(input_image)
        input_image_tensor = convert_to_image_tensor(invert_image(input_image)).view(1, 1, 220, 155)
        customer_sample_images = get_file_from_db(customer_id)
        if not customer_sample_images:
            return jsonify({'error': True})
        anchor_images = [Image.open(BytesIO(x)).convert('RGB') for x in customer_sample_images]
        # Rebuild the list: reassigning the loop variable alone (as in the
        # original listing) would leave anchor_images unprocessed.
        processed = []
        for x in anchor_images:
            x = numpy.array(x)
            x = x[:, :, ::-1].copy()  # RGB to BGR
            x = preproc.SignatureExtraction(x)
            x = cv2.cvtColor(x, cv2.COLOR_BGR2RGB)
            processed.append(Image.fromarray(x))
        anchor_images = processed
        anchor_image_tensors = [convert_to_image_tensor(invert_image(x)).view(-1, 1, 220, 155)
                                for x in anchor_images]
        model = load_model()
        mindist = math.inf
        for anci in anchor_image_tensors:
            f_A, f_X = model.forward(anci, input_image_tensor)
            dist = float(distance_metric(f_A, f_X).detach().numpy())
            mindist = min(mindist, dist)
            if dist <= 0.369907:
                # Avg acc across all batches=0.8022729407344791 at d=0.3699071424660876
                return jsonify({"match": True, "error": False,
                                "threshold": "%.6f" % 0.369907,
                                "distance": "%.6f" % mindist})
        return jsonify({"match": False, "error": False, "threshold": 0.369907,
                        "distance": round(mindist, 6)})
    except Exception as e:
        print(e)
        return jsonify({"error": True})

@app.route("/manifest.json")
def manifest():
    return send_from_directory('./frontend/build', 'manifest.json')

@app.route("/favicon.ico")
def favicon():
    return send_from_directory('./frontend/build', 'favicon.ico')

if __name__ == '__main__':
    main()
Preprocessing.py

from PIL import Image
from PIL.ImageOps import invert
import numpy as np
from torch import Tensor

def invert_image_path(path):
    image_file = Image.open(path)  # open colour image
    image_file = image_file.convert('L').resize([220, 155])
    image_file = invert(image_file)
    image_array = np.array(image_file)
    image_array[image_array >= 50] = 255
    image_array[image_array < 50] = 0
    return image_array

def convert_to_image_tensor(image_array):
    image_array = image_array / 255.0
    return Tensor(image_array).view(1, 220, 155)

def show_inverted(path):
    img = Image.fromarray(invert_image_path(path))
    img.show()

def invert_image(image_file):
    image_file = image_file.convert('L').resize([220, 155])
    image_file = invert(image_file)
    image_array = np.array(image_file)
    for i in range(image_array.shape[0]):
        for j in range(image_array.shape[1]):
            if image_array[i][j] <= 50:
                image_array[i][j] = 0
            else:
                image_array[i][j] = 255
    return image_array
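The per-pixel loop in invert_image performs the same thresholding as the array slicing in invert_image_path; a vectorized equivalent (a sketch, with behavior matching the loop above):

    import numpy as np

    def binarize(image_array, threshold=50):
        # pixels at or below the threshold become 0, the rest 255
        return np.where(image_array <= threshold, 0, 255).astype(np.uint8)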
Appendix C. Main components of the user interface
VerifyImageContainer.js

import React, { useState } from 'react';
import TextField from '../../Components/TextField/TextField';
import BrowseImages from '../../Components/BrowseImages/BrowseImages';
import ActionButton from '../../Components/ActionButton/ActionButton';
import Result from '../../Components/Result/Result';
import './VerifyImageContainer.css';

function VerifyImageContainer(props) {
  const [fileUrl, setFileUrl] = useState(new Array(props.imgLimit).fill(null));
  const [filesValid, setFilesValid] = useState(false);
  const [threshold, setThreshold] = useState(undefined);
  const [distance, setDistance] = useState(undefined);
  const [customerID, setCustomerID] = useState("");
  const [file, setFile] = useState(new Array(props.imgLimit).fill(undefined));
  const [errorColor, setErrorColor] = useState("red");
  const [errorText, setErrorText] = useState("");

  const postData = (event) => {
    event.preventDefault();
    if (filesValid) {
      setThreshold(undefined);
      setDistance(undefined);
      setErrorText("Loading...");
      setErrorColor("gray");
      const xhr = new XMLHttpRequest();
      let formdata = new FormData();
      formdata.append("newSignature", file[0]);
      formdata.append("customerID", customerID);
      xhr.open("POST", '/verify', true);
      xhr.onreadystatechange = function () {
        if (xhr.readyState === 4 && xhr.status === 200) {
          let res = JSON.parse(xhr.responseText);
          if (res.error == true) {
            setErrorText("Что-то пошло не так с сервером"); // "Something went wrong on the server"
            setErrorColor("red");
          } else {
            setErrorText("");
            console.log(res.threshold);
            console.log(res.distance);
            setThreshold(parseFloat(res.threshold));
            setDistance(parseFloat(res.distance));
          }
        }
      };
      xhr.send(formdata);
    }
  };

  return (
    {props.headingText}
UploadImageContainer.js

import React, { useState } from 'react';
import TextField from '../../Components/TextField/TextField';
import BrowseImages from '../../Components/BrowseImages/BrowseImages';
import ActionButton from '../../Components/ActionButton/ActionButton';
import './UploadImageContainer.css';

function UploadImageContainer(props) {
  const [fileUrl, setFileUrl] = useState(new Array(props.imgLimit).fill(undefined));
  const [file, setFile] = useState(new Array(props.imgLimit).fill(undefined));
  const [filesValid, setFilesValid] = useState(false);
  const [customerID, setCustomerID] = useState("");
  const [errorColor, setErrorColor] = useState("red");
  const [errorText, setErrorText] = useState("");

  const postData = (event) => {
    event.preventDefault();
    if (filesValid) {
      setErrorText("Waiting...");
      setErrorColor("gray");
      const xhr = new XMLHttpRequest();
      let formdata = new FormData();
      formdata.append("uploadedImage1", file[0]);
      formdata.append("uploadedImage2", file[1]);
      formdata.append("uploadedImage3", file[2]);
      formdata.append("customerID", customerID);
      xhr.open("POST", '/upload', true);
      xhr.onreadystatechange = function () {
        if (xhr.readyState === 4 && xhr.status === 200) {
          let res = JSON.parse(xhr.responseText);
          if (res.error == true) {
            setErrorText("Что-то пошло не так при загрузке изображений"); // "Something went wrong while uploading the images"
            setErrorColor("red");
          } else {
            setErrorText("Изображения успешно загружены"); // "Images uploaded successfully"
            setErrorColor("green");
          }
        }
      };
      xhr.send(formdata);
    }
  };

  return (
    {props.headingText}