#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################
"""
Server component.
"""
import argparse
import time
import tempfile
import logging
import getopt
from datetime import datetime
import json
import os
import pexpect
import zipfile
import base64
import sys
from OpenSSL import crypto
from flask import (
    Flask,
    abort,
    jsonify,
    request,
    make_response,
    render_template,
    redirect,
    url_for,
    session,
    send_from_directory,
)
from werkzeug.utils import secure_filename
from string import Template
sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
print(sys.path)
from pplca.config import (
const_log_format,
const_cert_dir,
const_data_dir,
const_mpc_program_dir,
const_mpc_program,
const_envflows_dir,
const_upload_dir,
)
# NOTE: log_error and log_exception are called further below and are assumed
# to be provided by pplca.log_utils alongside the other logging helpers.
from pplca.log_utils import log_info, log_debug, log_warning, log_error, log_exception
from pplca.config import const_verbose
# cwd = os.getcwd()
cwd = os.path.dirname(os.path.realpath(__file__))
ALLOWED_EXTENSIONS = set(["py", "crt", "mpc", "csr"])
app = Flask(__name__)
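# Simple in-memory stores used in place of a database: comDB tracks the
# companies taking part in the computation, certificatesDB the certificates
# that have been signed by this server.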
comDB = [
{
"id": -1,
"name": "RootCompany",
# 'password':'Com1',
# 'serialnumber':'1001'
# Actually right now we have different comp running command
"computation running command": "./PlayerBinary.x your_id Program",
}
]
certificatesDB = [
{
"serial_number": 1000,
"cert_name": "RootCA.crt",
"uploaded_time": "2019/10/14 17:45:25",
}
]
@app.route("/")
def home():
if not session.get("logged_in"):
return render_template("login.html")
else:
return render_template("index.html")
@app.route("/comdb/company", methods=["GET"])
def getAllCom():
return jsonify({"comps": comDB})
@app.route("/certificatesdb/certlist/", methods=["GET"])
def sendCertList():
"""Send List of Signed Certificates to the client"""
certlist = {"CertNames": []}
try:
with open(cwd + "/config.json") as json_file:
data = json.load(json_file)
    except IOError:
        log_debug("(Sending Cert List) - Config file could not be found in " + cwd)
        return jsonify({"certs": certlist})
certlist["CertNames"].append(data["Start"]["RootCAname"] + ".crt")
for name in data["Start"]["Certs"]["CertName"]:
certlist["CertNames"].append(name)
return jsonify({"certs": certlist})
@app.route("/comdb/company/<comId>", methods=["GET"])
def getCom(comId):
usr = [com for com in comDB if (com["id"] == comId)]
return jsonify({"com": usr})
@app.route("/computationId/<comName>", methods=["GET"])
def getID(comName):
# cwd=os.getcwd()
cwd = os.path.dirname(os.path.realpath(__file__))
try:
with open(cwd + "/config.json") as json_file:
data = json.load(json_file)
    except IOError:
        log_debug("Config file could not be found in " + cwd)
        return "Could not find the config file"
counter = 0
for cert in data["Start"]["Certs"]["CertName"]:
if cert == comName:
log_debug("Company id " + str(counter) + " is sent to Company " + comName)
return str(counter)
else:
counter += 1
log_debug("Certificate " + comName + " could not be found.")
return "Could not find the certificate"
@app.route("/comdb/company/<comId>", methods=["PUT"])
def updateCom(comId):
company = [com for com in comDB if (com["id"] == comId)]
if "name" in request.json:
company[0]["name"] = request.json["name"]
log_info(
"Company with id "
+ str(comId)
+ " has changed the company name "
+ str(company[0]["name"])
)
log_debug(
"Company with id "
+ str(comId)
+ " has changed the company name "
+ str(company[0]["name"])
)
return jsonify({"com": company[0]})
@app.route("/comdb/company", methods=["POST"])
def createCom():
if not request.json or "name" not in request.json:
log_debug("It is not json request or it does not have the company name.")
        abort(400)
data = request.json
for com in comDB:
if data["name"] == com["name"]:
return "This name exists."
dat = {
"id": comDB[-1]["id"] + 1,
"name": request.json["name"],
# we have different comp running command right now (docker)
"computation running command": "./PlayerBinary.x your_id Program",
}
comDB.append(dat)
log_info("New company " + str(data["name"]) + " is added to the computation.")
log_debug("Company " + str(data["name"]) + " is added to the computation.")
return jsonify(dat)
@app.route("/comdb/company/<comId>", methods=["DELETE"])
def deleteCom(comId):
company = [com for com in comDB if (com["id"] == comId)]
if len(company) == 0:
os.abort(404)
comDB.remove(company[0])
log_debug(
"Company " + str(company[0]["name"]) + " is deleted from the computation group"
)
log_info("Company " + str(company[0]["name"]) + " left the computation group")
return jsonify({"response": "Success"})
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({"error": "not found"}), 404)
def allowed_file(filename):
return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route("/file", methods=["GET", "POST"])
def upload_file():
# cwd=os.getcwd()
cwd = os.path.dirname(os.path.realpath(__file__))
""""Link in order to upload a file"""
if request.method == "POST":
file = request.files["file"]
# check if the post request has the file part
if "file" not in request.files:
log_debug("File " + str(file) + " could not found!!!")
return redirect(request.url)
# if user does not select file, browser also
# submit an empty part without filename
if file.filename == "":
log_debug("No selected file. Empty!!!")
return redirect(request.url)
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(const_upload_dir(cwd), filename))
return redirect(url_for("uploaded_file", filename=filename))
return """
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form method=post enctype=multipart/form-data>
<input type=file name=file>
<input type=submit value=Upload>
</form>
"""
@app.route("/uploads/<filename>")
def uploaded_file(filename):
# cwd=os.getcwd()
cwd = os.path.dirname(os.path.realpath(__file__))
"""Link for the uploaded files"""
name, ext = os.path.splitext(filename)
log_debug(
"Uploaded file " + filename + ", file name " + name + " and extension " + ext
)
splitdata = name.split("_")
if ext == ".csr":
try:
with open(const_upload_dir(cwd) + "/" + filename, "rb") as my_cert_file:
my_cert_text = my_cert_file.read()
log_debug(str(my_cert_file) + " is read")
clientcert = crypto.load_certificate_request(
crypto.FILETYPE_PEM, my_cert_text
)
log_debug("Sign client certificate " + str(clientcert))
        except IOError:
            log_debug(
                "Certificate "
                + filename
                + " could not be found in "
                + const_upload_dir(cwd)
            )
            return "Could not find the certificate"
newfile = signCertificates(clientcert, splitdata[1])
newname = name + ".crt"
crtpath = const_cert_dir(cwd) + newname
log_debug("Storing CRT Stored Here :" + crtpath)
with open(crtpath, "wb") as f:
f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, newfile))
dat = {
"serial_number": newfile.get_serial_number(),
"cert_name": newname,
"uploaded_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
}
certificatesDB.append(dat)
return send_from_directory(const_upload_dir(cwd), filename)
def signCertificates(clientcert, name):
    """Sign Clients Certificates"""
    # cwd=os.getcwd()
    cwd = os.path.dirname(os.path.realpath(__file__))
    with open(const_cert_dir(cwd) + name + ".crt", "rb") as my_cert_file:
        my_cert_text = my_cert_file.read()
    readablecert = crypto.load_certificate(crypto.FILETYPE_PEM, my_cert_text)
    with open(const_cert_dir(cwd) + name + ".key", "rb") as keyfile:
        mykey = keyfile.read()
    readableprivatekey = crypto.load_privatekey(crypto.FILETYPE_PEM, mykey)
CAcert = crypto.X509()
    log_info("Serial number of the new certificate: " + str(certificatesDB[-1]["serial_number"] + 1))
CAcert.set_serial_number(certificatesDB[-1]["serial_number"] + 1)
CAcert.gmtime_adj_notBefore(0)
CAcert.gmtime_adj_notAfter(5 * 365 * 24 * 60 * 60)
CAcert.set_issuer(readablecert.get_subject())
CAcert.set_subject(clientcert.get_subject())
CAcert.set_pubkey(clientcert.get_pubkey())
CAcert.sign(readableprivatekey, "sha256")
log_info("Certificate of supplier company " + name + " is signed.")
return CAcert
@app.route("/cert/<filename>", methods=["GET"])
def sendBackSignedCertificate(filename):
# cwd=os.getcwd()
cwd = os.path.dirname(os.path.realpath(__file__))
"""Send the certificate to the client"""
path = const_cert_dir(cwd) + filename
with tempfile.NamedTemporaryFile() as tmp:
fileN = tmp.name
with open(fileN, "w") as writefile:
try:
with open(path, "rb") as my_cert_file:
writefile = my_cert_file.read()
except IOError:
log_debug(path + " could not found!")
log_debug(filename + " signed certificate is sent to the supplier company back.")
return writefile
def generateSelfSignedCert(cwd):
"""Self Signed Certificate"""
certname = input("RootCA name: \n")
k = crypto.PKey()
k.generate_key(crypto.TYPE_RSA, 4096)
CERT_FILE = certname + ".crt"
KEY_FILE = certname + ".key"
cert = crypto.X509()
if not (
os.path.exists(const_cert_dir(cwd) + CERT_FILE)
or (os.path.exists(const_cert_dir(cwd) + KEY_FILE))
):
# create a self-signed cert
cert.get_subject().C = input("Country: \n")
cert.get_subject().ST = input("State: \n")
cert.get_subject().L = input("Locality: \n")
cert.get_subject().O = input("Organization: \n")
cert.get_subject().OU = input("Organizational Unit: \n")
cert.get_subject().CN = certname
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(5 * 365 * 24 * 60 * 60)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(k)
cert.sign(k, "sha256")
log_debug("Creating Root CA for " + certname)
        with open(const_cert_dir(cwd) + CERT_FILE, "wb") as certfile:
            certfile.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
        with open(const_cert_dir(cwd) + KEY_FILE, "wb") as keyfile:
            keyfile.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))
log_info("Name of Root Certificate and key are " + certname)
else:
        log_warning(
            certname
            + " certificate-key pair already exists in directory "
            + const_cert_dir(cwd)
            + "!!"
        )
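# makeMPCconfigFile interactively collects the scale vector (final demand for
# the root company, required amounts from each supplier company) and the list
# of environmental flows, and stores them in MPC.json so that later runs of
# createMPCFile do not have to prompt for them again.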
def makeMPCconfigFile(rootdir):
try:
with open(rootdir + "/config.json") as json_file:
data = json.load(json_file)
    except IOError:
        log_debug("Could not find config.json file in directory " + rootdir)
        return
comNumber = int(data["Start"]["NumberofPlayers"])
# input("How many companies will join the computation?(count yourself
# too) : ")
MPC_config = {"ScaleVector": [], "NameofEnvFlows": [], "NumofEnvFlows": []}
for i in range(comNumber):
if i == 0:
finalDemand = input(
"How many/much the product is produced by {} company? : ".format(
data["Start"]["Certs"]["CertName"][i]
)
)
MPC_config["ScaleVector"].append(str(finalDemand))
else:
value = input(
"How many/much the product is needed from {} company? ".format(
data["Start"]["Certs"]["CertName"][i]
)
)
MPC_config["ScaleVector"].append(str(value))
numEnvFlows = str(
input("How many environmental flows the system will have in the computation?: ")
)
MPC_config["NumofEnvFlows"].append(numEnvFlows)
for j in numEnvFlows:
envflow = input("Name of environmental flow")
MPC_config["NameofEnvFlows"].append(envflow)
try:
with open(rootdir + "MPC.json", "w") as MPCfile:
json.dump(MPC_config, MPCfile)
log_debug(
"Scale vector and environmental flow list used in MPC saved in directory "
+ rootdir
+ "/MPC.json"
)
except:
log_exception("MPC file (MPC.json) is NOT created! - " + rootdir)
def makeEnvFlowsListFile(enFlName, rootdir):
envFlowDict = {"NameofEnvFlow": []}
for i in enFlName:
envFlowDict["NameofEnvFlow"].append(i)
with open(rootdir + "/envflowslist.json", "w") as f:
json.dump(envFlowDict, f)
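# createMPCFile fills the SuppChainAgg.mpc-template with the values from
# MPC.json (number of environmental flows, their names, number of players)
# via string.Template substitution, writes the resulting .mpc program into
# the MPC program directory and then compiles it.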
def createMPCFile(rootdir):
    """Rewriting the mpc file"""
    try:
        with open(rootdir + "/config.json") as json_file:
            data = json.load(json_file)
    except IOError:
        log_debug("Could not find config.json file in directory " + rootdir)
        return
if os.path.exists(rootdir + "/MPC.json"):
log_debug("MPC Config file exists! - " + rootdir)
else:
makeMPCconfigFile(rootdir)
try:
with open(rootdir + "/MPC.json") as MPCconfigFile:
MPCdata = json.load(MPCconfigFile)
    except IOError:
        log_debug("Could not find MPC.json file in " + rootdir)
        return
try:
newfile = (
const_mpc_program_dir(rootdir)
+ "/"
+ const_mpc_program()
+ "/"
+ const_mpc_program()
+ ".mpc"
)
f = open(newfile, "wt")
comNumber = int(data["Start"]["NumberofPlayers"])
# input("How many companies will join the computation?(count yourself
# too) : ")
""""
scaleVector = ""
for i in range(comNumber):
# main company will be the first company in computation
if i == 0:
# finalDemand = input("How many/much the product is produced by
# {} company? : ".format(data['Start']['Certs']['CertName'][i]))
finalDemand = MPCdata["ScaleVector"][i]
scaleVector += str(finalDemand) + ","
elif i == (comNumber - 1):
# value = input("How many/much the product is needed from {}
# company? ".format(data['Start']['Certs']['CertName'][i]))
value =MPCdata["ScaleVector"][i]
scaleVector += str(value)
else:
# value=input("How many/much the product is needed from {}
# company? ".format(data['Start']['Certs']['CertName'][i]))
value = MPCdata["ScaleVector"][i]
scaleVector += str(value) + ","
"""
# numEnvFl = input("How many environmental flows the system will have
# in the computation?: ")
numEnvFl = MPCdata["NumofEnvFlows"][0]
enFlName = ""
for i in range(int(numEnvFl)):
# enFlName+="'"+input("Environmental flow name :")+"',"
enFlName += "'" + MPCdata["NameofEnvFlows"][i] + "',"
# enFlName+="'"+input("Environmental flow name :")+"'"
enFlName += "'" + MPCdata["NameofEnvFlows"][int(numEnvFl) - 1] + "'"
# create Environmental Flows json File for players
makeEnvFlowsListFile(MPCdata["NameofEnvFlows"], rootdir)
supplychainfile = (
rootdir + "/Program-Templates/SuppChainAgg/SuppChainAgg.mpc-template"
)
        with open(supplychainfile) as computationFile:
            src = Template(computationFile.read())
d = {
"numEnvFl": numEnvFl,
"enFlName": enFlName,
"companyNumber": comNumber,
# "scaleVector": scaleVector,
}
result = src.substitute(d)
f.write(result)
f.close()
log_debug("MPC file is created in directory " + newfile)
except:
log_exception("MPC file is NOT created - " + newfile)
compileMPCFile(rootdir)
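# compileMPCFile compiles the generated .mpc program inside the
# scale-mamba-latest container image via podman, mounting the local Data and
# Programs directories into the container, and removes the container again
# afterwards.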
def compileMPCFile(rootdir):
"""Compile the mpc file"""
try:
cmd = (
"podman run --cidfile "
+ rootdir
+ "/mpccontainerId -it --volume "
+ rootdir
+ "/Data:/opt/src/SCALE-MAMBA/Data --volume "
+ rootdir
+ "/Programs:/opt/src/SCALE-MAMBA/Programs -w /opt/src/SCALE-MAMBA/ localhost/scale-mamba-latest compile-new.sh "
+ const_mpc_program_dir()
+ "/"
+ const_mpc_program()
)
print(cmd)
with open("/tmp/cmd.txt", "w") as command:
command.write(cmd)
os.system(cmd)
log_debug("MPC file is compiled.")
log_info("MPC computation file SuppChainAgg.mpc is ready to be used.")
except:
log_exception("MPC file could NOT be compiled!")
try:
con_rm_cmd = (
"podman rm --cidfile "
+ rootdir
+ "/mpccontainerId && rm "
+ rootdir
+ "/mpccontainerId"
)
os.system(con_rm_cmd)
log_debug("MPC's container is removed! - " + rootdir)
except:
log_debug("MPC's container could NOT be removed! - " + rootdir)
def zipComputationFiles(path, ziph):
"""Zip and send the folder that contains computation files"""
for root, dirs, files in os.walk(path):
for f in files:
ziph.write(os.path.join(root, f), f, zipfile.ZIP_DEFLATED)
@app.route("/computationfile", methods=["GET"])
def sendComputationFile():
"""send MPC computation file to suppliers"""
    progdir = const_mpc_program_dir(cwd) + "/" + const_mpc_program()
    with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp:
        zipname = tmp.name
    try:
        with zipfile.ZipFile(zipname, "w") as zipf:
            zipComputationFiles(progdir, zipf)
    except IOError:
        log_debug(progdir + " could not be found.")
with open(zipname, "rb") as zipfolder:
myzipfolder = base64.b64encode(zipfolder.read())
log_debug("MPC computation folder is sent to the supplier")
return myzipfolder
def zipEnvFile(path, ziph, envfile):
"""Zip and send the folder that contains computation files"""
ziph.write(os.path.join(path, envfile), envfile, zipfile.ZIP_DEFLATED)
@app.route("/envflowslistfile", methods=["GET"])
def sendEnvFlowsFile():
"""send Environmental Flows List file to suppliers"""
cwd = os.path.dirname(os.path.realpath(__file__))
# envdir = const_envflows_dir(cwd)
envfile = "envflowslist.json"
zipname = cwd + "/env-flows.zip"
try:
with zipfile.ZipFile(zipname, "w") as zipf:
zipEnvFile(cwd, zipf, envfile)
    except IOError:
        log_debug(cwd + "/" + envfile + " could not be found.")
        print("Not found")
with open(zipname, "rb") as zipfolder:
myzipfolder = base64.b64encode(zipfolder.read())
log_debug("Environmental Flows List file is sent to the supplier")
return myzipfolder
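# zipDataFolder collects the MAC key files (file names containing "MKey")
# from the Data folder; MKey-<comId>.key is the requesting supplier's own
# MAC key and is logged as such.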
def zipDataFolder(path, ziph, keyword):
"""Zip and send the Data folder(MAC key and other files)"""
keyword2 = "MKey"
keyword3 = keyword2 + "-" + str(keyword) + ".key"
for root, dirs, files in os.walk(path):
for file in files:
if keyword2 in file:
if keyword3 in file:
ziph.write(os.path.join(root, file), file, zipfile.ZIP_DEFLATED)
log_debug("MAC key of supplier is " + keyword3)
else:
ziph.write(os.path.join(root, file), file, zipfile.ZIP_DEFLATED)
@app.route("/datafolder/<comId>", methods=["GET"])
def sendDataFolder(comId):
"""send the files of Data folder to suppliers"""
    with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp:
zipname = tmp.name
try:
with zipfile.ZipFile(zipname, "w") as zipf:
zipDataFolder(const_data_dir(cwd), zipf, comId)
except IOError:
log_debug(const_data_dir(cwd) + " folder could not be found.")
with open(zipname, "rb") as zipfolder:
myzipfolder = base64.b64encode(zipfolder.read())
log_debug("Data folder is sent to the supplier")
return myzipfolder
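# makeConfigFile interactively builds config.json (set-up mode, root CA name,
# number of players, per-player IP addresses and certificate names, LSSS
# choice, modulus and threshold); settingupComputation later replays these
# answers into SetupBinary.x.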
def makeConfigFile(rootComp, rootdir):
"""Make Configuration file - is used for set up step in Scale mamba"""
if os.path.exists(rootdir + "/config.json"):
log_debug("Config file exists! - " + rootdir)
else:
config = {"Start": {"Certs": {"IPAdd": [], "CertName": []}}}
config["Start"]["Set-up"] = str(
input(
"What do you want to set up? \n"
+ "1) Certs \n"
+ "2) Secret Sharing \n"
+ "3) Conversion circuit for LSSS<->GC computations \n"
+ "4) All three \n"
+ "Enter a number (1-4).. \n"
)
)
config["Start"]["RootCAname"] = input("RootCA of the computation \n")
        numberofPlayers = int(input("Number of players \n"))
        config["Start"]["NumberofPlayers"] = str(numberofPlayers)
        for i in range(numberofPlayers):
            config["Start"]["Certs"]["IPAdd"].append(input("IP Address \n"))
            config["Start"]["Certs"]["CertName"].append(input("Which Certificate \n"))
# They do not ask user anymore whether fakeOffline or fakeSacrifice
# config['Start']['FakeOffline'] = fp.readline().replace("\n", "")
# config['Start']['FakeSacrifice'] = fp.readline().replace("\n", "")
# which secret sharing scheme (in our case it is Shamir Secret Sharing)
config["Start"]["LSSS"] = "1"
# If you want to use other LSSS, you should modify your config file with requirements in SCALE-MAMBA
# + "0) Full Threshold \n"
# + "1) Shamir \n";
# + "2) Replicated \n";
# + "3) General Q2 MSP \n"
config["Start"]["Modulus"] = str(
input("What modulus do you want to use for secret sharing?")
)
config["Start"]["threshold"] = str(
input("Enter threshold 0 < t < " + str(numofComp / 2))
)
with open(rootdir + "/config.json", "w") as f:
json.dump(config, f)
log_debug(
"Config file is generated for setting up computation (for Data folder)-"
+ rootComp
+ " Root company (in directory "
+ rootdir
+ ")"
)
def createInitialFolders(cwd):
"""Initial Folders"""
os.makedirs(const_upload_dir(cwd), exist_ok=True)
os.makedirs(const_mpc_program_dir(cwd) + "/" + const_mpc_program(), exist_ok=True)
os.makedirs(const_cert_dir(cwd), exist_ok=True)
os.makedirs(const_data_dir(cwd), exist_ok=True)
log_debug("Creation of Initial folders is done.")
def setInitialSteps(cwd):
createInitialFolders(cwd)
generateSelfSignedCert(cwd)
log_info("Root company is ready to be connected !")
def settingupComputation(rootCompany, rootdir):
"""For Setting up the Scale mamba SetupBinary.x"""
if os.path.isfile(rootdir + "/timing_analysis_for_setup.json"):
with open(rootdir + "/timing_analysis_for_setup.json", "r") as readfile:
timingResults = json.load(readfile)
else:
timingResults = {"TimingAnalysis": []}
singleTest = {}
singleTest["1-Beginning-of-func"] = time.perf_counter()
# log_debug("Timing start-beginning 1 !!!! "+ str(time.perf_counter()))
makeConfigFile(rootCompany, rootdir)
log_info(
"The system is setting up the configuration of the computation...It can take several mins (estimated time 10-15 mins)."
)
try:
with open(rootdir + "/config.json") as json_file:
data = json.load(json_file)
    except IOError:
        log_debug("Could not find config.json file in " + rootdir)
        return
    # we can put here or when generating config file directly
    if int(data["Start"]["NumberofPlayers"]) < 3:
        log_error(
            "You are trying to run a scenario that we do NOT support! Setup cannot run: "
            + "this part of the computation has fewer than 3 participants, "
            + "so it cannot be kept confidential."
        )
        sys.exit(2)
try:
# log_debug("Timing start-shell command 2 !!!! "+ str(time.perf_counter()))
shell_cmd = (
"podman run --cidfile "
+ rootdir
+ "/setupcontainerId -it --volume "
+ rootdir
+ "/Data:/opt/src/SCALE-MAMBA/Data --volume "
+ rootdir
+ "/Cert-Store:/opt/src/SCALE-MAMBA/Cert-Store -w /opt/src/SCALE-MAMBA/ localhost/scale-mamba-latest SetupBinary.x"
)
# log_debug("Timing start-shell 3 !!!! "+ str(time.perf_counter()))
singleTest["2-Before-run-cmd"] = time.perf_counter()
child = pexpect.spawn("/bin/bash", ["-c", shell_cmd], echo=False)
# log_debug("Timing start-inputs 4 !!!! "+ str(time.perf_counter()))
singleTest["3-After-run-cmd"] = time.perf_counter()
child.expect("Enter", timeout=30)
singleTest["4-expect-input-startsetup"] = time.perf_counter()
child.sendline(str(data["Start"]["Set-up"]))
# log_debug("Timing start-inputs 5 !!!! "+ str(time.perf_counter()))
child.expect("the root CA", timeout=30)
singleTest["5-expect-input-rootCAname"] = time.perf_counter()
child.sendline(str(data["Start"]["RootCAname"]))
child.expect("Number of players", timeout=30)
singleTest["6-expect-input-numofplayers"] = time.perf_counter()
child.sendline(str(data["Start"]["NumberofPlayers"]))
singleTest["7-expect-input-start-ipandcert"] = time.perf_counter()
for i in range(int(data["Start"]["NumberofPlayers"])):
child.expect(str(r"IP Address.*\n"), timeout=300)
child.sendline(str(data["Start"]["Certs"]["IPAdd"][i]))
child.expect("Name of certificate", timeout=30)
child.sendline((str(data["Start"]["Certs"]["CertName"][i])).rstrip())
singleTest["8-expect-input-end-ipandcert"] = time.perf_counter()
# SCALE-MAMBA not asking them anymore - but maybe in future they can...
# child.expect('Fake offline')
# child.sendline(data['Start']['FakeOffline'])
# child.expect('Fake sacrifice')
# child.sendline(data['Start']['FakeSacrifice'])
child.expect("Enter a number")
singleTest["9-expect-input-LSSS"] = time.perf_counter()
child.sendline(data["Start"]["LSSS"])
child.expect("secret sharing?")
singleTest["10-expect-input-secretsharing"] = time.perf_counter()
child.sendline(data["Start"]["Modulus"])
child.expect("Enter threshold", timeout=5000)
singleTest["11-expect-input-threshold"] = time.perf_counter()
child.sendline(str(data["Start"]["threshold"]))
child.wait()
log_info("Setup for the computation is completed(Data Folder).")
singleTest["12-expect-setupcompleted"] = time.perf_counter()
except:
print("Could NOT complete Setup!!!!")
try:
singleTest["13-expect-input-before-delcontainer"] = time.perf_counter()
# log_debug("Timing start-container 6 !!!! "+ str(time.perf_counter()))
con_rm_cmd = (
"podman rm --cidfile "
+ rootdir
+ "/setupcontainerId && rm "
+ rootdir
+ "/setupcontainerId"
)
os.system(con_rm_cmd)
log_debug("Setup's container is removed! - " + rootdir)
singleTest["14-expect-input-before-delcontainer"] = time.perf_counter()
# log_debug("Timing end-container 7 !!!! "+ str(time.perf_counter()))
except:
log_debug("Setup's container could NOT be removed! - " + rootdir)
singleTest["15-expect-input-before-delcontainer"] = time.perf_counter()
# log_debug("Timing end 1 !!!! "+ str(time.perf_counter()))
timingResults["TimingAnalysis"].append(singleTest)
with open(rootdir + "/timing_analysis_for_setup.json", "w") as outfile:
json.dump(timingResults, outfile)
def runServer(portNumber):
log_info("Open server for clients")
app.secret_key = os.urandom(12)
app.run(port=portNumber)
def helpmsg():
    """Print help message."""
    print("server.py [OPTION]")
    print("    -h        print this help text")
    print("    -s        silent (no log messages)")
def main(conf):
retval = False
if conf.cmd == "InitializeSupplyChain":
retval = setInitialSteps(conf.rootDir)
elif conf.cmd == "Set-up":
retval = settingupComputation(conf.rootCompany, conf.rootDir)
elif conf.cmd == "MPCFile":
retval = createMPCFile(conf.rootDir)
elif conf.cmd == "RunningServer":
retval = runServer(conf.portNumber)
return retval
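# Typical sequence of invocations (illustrative; paths are placeholders):
#   python3 server.py -c InitializeSupplyChain -root /path/to/rootcompany
#   python3 server.py -c Set-up -rc RootCompany -root /path/to/rootcompany
#   python3 server.py -c MPCFile -root /path/to/rootcompany
#   python3 server.py -c RunningServer -p 4999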
if __name__ == "__main__":
verbose = True
try:
opts, args = getopt.getopt(
sys.argv, "hsed:a:o:w", ["date=", "archive=", "output="]
)
except getopt.GetoptError:
helpmsg()
sys.exit(2)
for opt, arg in opts:
if opt == "-h":
helpmsg()
sys.exit()
elif opt == "-s":
verbose = False
if len(args) > 0:
extid = args[0]
else:
helpmsg()
sys.exit()
logger = logging.getLogger()
ch = logging.StreamHandler(sys.stderr)
ch.setFormatter(logging.Formatter(const_log_format("server.py")))
logger.addHandler(ch)
if verbose:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.WARNING)
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter,
description="Server Service module.",
)
main_parser.add_argument(
"-c",
"--cmd",
metavar="cmd",
choices=["InitializeSupplyChain", "Set-up", "MPCFile", "RunningServer"],
default="InitializeSupplyChain",
help="Command:\n"
+ " InitializeSupplyChain: Create main folders and generate Root Certificate\n"
+ " Set-up: Set up required files and MAC-keys-Data Folder\n"
+ " MPCFile: Creation and Compiling of MPC File-Programs/SuppAggChain Folder\n"
+ " RunningServer: Serving the server to clients",
)
main_parser.add_argument(
"-p",
"--portNumber",
type=int,
default="4999",
help="give port number to server",
)
main_parser.add_argument(
"-rc",
"--rootCompany",
default="",
help="give root company name",
)
main_parser.add_argument(
"-root",
"--rootDir",
default="",
help="give the directory of root company",
)
main_parser.add_argument(
"-v",
"--verbose",
action="store_true",
default=const_verbose(),
help="increase verbosity",
)
main_conf = main_parser.parse_args()
sys.exit(main(main_conf))