Initial import (from the internal development repository).
Commit e884a1a8ea
.devcontainer/Dockerfile
@@ -0,0 +1,84 @@
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

FROM registry.hub.docker.com/library/debian:testing-slim

ARG USERNAME=lh
ARG USER_UID=1000
ARG USER_GID=$USER_UID

ENV LD_LIBRARY_PATH=/opt/local/lib/:${LD_LIBRARY_PATH}
ENV LIBRARY_PATH=/opt/local/lib/:${LIBRARY_PATH}
ENV C_INCLUDE_PATH=/opt/local/include/:${C_INCLUDE_PATH}
ENV CPLUS_INCLUDE_PATH=/opt/local/include/:${CPLUS_INCLUDE_PATH}
ENV PATH=/opt/local/bin:/opt/scale-mamba/:${PATH}
ENV SCALE_MAMBA=/opt/scale-mamba/

# Configure apt and install packages
RUN mkdir -p /usr/share/man/man1
RUN apt-get update \
    #
    # Upgrade base image to ensure that all security updates are installed
    # && apt-get -y dist-upgrade \
    #
    # Ensure availability of common tools
    && apt-get -y install libssl-dev curl git openssh-client less iproute2 procps lsb-release \
    #
    # Install required development tools (e.g., g++, Python 2)
    && apt-get -y install m4 g++ yasm git unzip gcc libssl-dev make libcrypto++-dev python \
    #
    # Install MPIR 3.0.0
    && mkdir -p /opt/src/ \
    && cd /opt/src && curl http://mpir.org/mpir-3.0.0.tar.bz2 | tar xjf - \
    && cd /opt/src/mpir-3.0.0 && ./configure --enable-cxx --prefix=/opt/local && make -j 4 && make -j 4 check && make -j 4 install \
    #
    # Install Crypto++ 8.2.0 (cryptopp820)
    && mkdir -p /opt/src/ && cd /opt/src \
    && LD_LIBRARY_PATH= curl -O https://www.cryptopp.com/cryptopp820.zip && mkdir cryptopp820 \
    && cd /opt/src/cryptopp820 && unzip ../cryptopp820.zip && rm ../cryptopp820.zip && make -j 4 && make -j 4 libcryptopp.so && PREFIX=/opt/local make -j 4 install \
    #
    # Requirements for remote sharing
    && apt-get -y install libicu[0-9][0-9] libkrb5-3 zlib1g gnome-keyring libsecret-1-0 desktop-file-utils x11-utils openssl \
    #
    # Install SCALE-MAMBA
    && LD_LIBRARY_PATH= git clone https://github.com/KULeuven-COSIC/SCALE-MAMBA.git /opt/src/SCALE-MAMBA \
    && cd /opt/src/SCALE-MAMBA \
    # Pin SCALE-MAMBA version 1.5 (git commit d7c960afd0a9776f04e15a5653caf300dd42f20a)
    && git checkout d7c960afd0a9776f04e15a5653caf300dd42f20a \
    && cp CONFIG CONFIG.mine && echo ROOT=`pwd` >> CONFIG.mine \
    && sed -i -e 's/PRSS.cpp/PRSS.h/' src/Offline/DABitGenerator.cpp \
    && make -j 4 progs \
    #
    # Install the SCALE-MAMBA binaries and compiler
    && mkdir -p /opt/scale-mamba \
    && cp Player.x /opt/scale-mamba/ \
    && cp Setup.x /opt/scale-mamba/ \
    && cp src/libMPC.a /opt/scale-mamba/ \
    && cp compile.py /opt/scale-mamba/ \
    && cp -r Compiler /opt/scale-mamba/ \
    #
    # Create a non-root user to use if preferred
    && groupadd --gid $USER_GID $USERNAME \
    && useradd -ms /bin/bash -o --uid $USER_UID --gid $USER_GID -m $USERNAME \
    #
    # Clean up
    && apt-get autoremove -y \
    && apt-get clean -y \
    && rm -rf /var/lib/apt/lists/*

USER $USERNAME
ENV HOME=/home/$USERNAME
WORKDIR /opt/scale-mamba
.devcontainer/devcontainer.json
@@ -0,0 +1,34 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.154.2/containers/debian
{
    "name": "logicalhacking/scale_mamba",
    "build": {
        "dockerfile": "Dockerfile"
    },

    // Set *default* container specific settings.json values on container create.
    "settings": {
        "terminal.integrated.shell.linux": "/bin/bash"
    },

    // Add the IDs of extensions you want installed when the container is created.
    "extensions": [
        "ms-vscode.cpptools",
        "ms-vscode.cpptools-extension-pack",
        "ms-vscode.cmake-tools",
        "austin.code-gnu-global",
        "ms-python.python"
    ],

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
    // "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],

    // Needed when using a ptrace-based debugger for C++, Go, or Rust.
    "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ],

    // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
    "remoteUser": "lh"
}
.gitignore
@@ -0,0 +1,6 @@
.idea
__pycache__
template/mpc
P*
.vscode
LICENSE
@@ -0,0 +1,202 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
README.md
@@ -0,0 +1,73 @@
# Confidentiality Enhanced Life-Cycle Assessment

ConfidentialLCA is a prototype demonstrating the use of Secure
Multi-Party Computation (SMPC) for the Life-Cycle Assessment (LCA) of
supply chains.

## Prerequisites

ConfidentialLCA requires [Python](https://www.python.org/) 3 and
[Podman](https://podman.io/).

## Installation

First, you need to install a few Python libraries. For this, please
execute:

```shell
pip3 install --user -r requirements.txt
```

Second, you need to build a Podman container. For this, please
execute:

```shell
podman build -t scale-mamba-latest .devcontainer
```

## Usage

### Preparing the PKI Infrastructure

To generate the X.509 certificates required for executing the example
case study scenarios, please execute

```shell
./generate-all-certificates.sh <number_of_companies>
```

where ``<number_of_companies>`` is the maximum number of companies
across all scenarios.
### Running LCA

The LCA for the various scenarios can be run using the provided
script for running our default benchmarks:

```shell
./generate_and_run_test_scenario.py --treeStructure <scenario>
```

where ``<scenario>`` is either 0 (balanced supply chain tree) or 1
(right-filled supply chain tree) and selects the structure of the
supply chain.

## Authors

* [Achim D. Brucker](http://www.brucker.ch/)
* [Sakine Yalman](http://emps.exeter.ac.uk/computer-science/staff/sy359)

## License

This project is licensed under an Apache 2.0 license.

SPDX-License-Identifier: Apache-2.0

## Main Repository

The main git repository for this project is hosted by the [Software
Assurance & Security Research Team](https://logicalhacking.com) at
<https://git.logicalhacking.com/PrivacyPreservingLCA/ConfidentialLCA>.
generate-all-certificates.sh
@@ -0,0 +1,63 @@
#!/bin/bash
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

set -e

NUMBEROFCOMP=$1

# Abort if something is already listening on port 4999 (bash's /dev/tcp
# pseudo-device succeeds only if the connection can be opened).
if (: </dev/tcp/127.0.0.1/4999) &>/dev/null
then
    echo "Server still running ..."
    exit 1
fi

if [ -d "companies" ]; then
    rm -rf companies
    echo "Existing companies folder deleted"
fi

# Create one directory per company, seeded from the template.
mkdir -p companies
for i in $(seq $NUMBEROFCOMP); do
    mkdir -p companies/P$i
    cp -r ./template ./companies/P$i/Root
    #mv ./companies/P$i/mpc/* ./companies/P$i
    #rm -rf ./companies/P$i/mpc
done

# Generate a root certificate for each company.
for i in $(seq $NUMBEROFCOMP); do
    ROOTCA=P$i
    echo "Generating root certificate in directory $ROOTCA"
    echo -e "$ROOTCA\nUK\nDevon\nExeter\nExeUni\nCS\n" | (./companies/P$i/Root/server.py -c InitializeSupplyChain -p 4999 -root ./companies/P$i/Root)
done

# For each root CA, start its server and let every company join its supply
# chain (i.e., obtain a player certificate signed by that root CA).
for i in $(seq $NUMBEROFCOMP); do
    ROOTCA=P$i
    (cd ./companies/$ROOTCA/Root; ./server.py -c RunningServer -p 4999 & echo "$!" > server.pid)
    sleep 10
    for j in $(seq $NUMBEROFCOMP); do
        PLAYER="P${j}_${ROOTCA}"
        echo "$PLAYER"
        echo "Starting Player in directory P$j"
        echo -e "$PLAYER\nUK\nDevon\nExeter\nExeUni\n$PLAYER\n$PLAYER@exeter.ac.uk\n" | (cd ./companies/P$j/Root; ./boarding.py joinSupplyChain )
    done
    pkill -F ./companies/$ROOTCA/Root/server.pid
done

# Create the 'mypod' pod used later by the players (via a no-op container)
# if it does not exist yet.
[ ! "$(podman pod ps | grep mypod)" ] && podman run --pod new:mypod -t scale-mamba-latest true

exit
generate_and_run_test_scenario.py
@@ -0,0 +1,943 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

import argparse
import concurrent.futures  # convenient, but seems slower than the old Process-based version
import json
import logging
import os
import random
import shutil
import sys
import time
from datetime import datetime

import pexpect

sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
from template.server import settingupComputation, createMPCFile, runServer
from template.computation import start_computation, update_env_flows_list
from pplca.compute import get_env_flows_list_file, get_computation_id, run_computation
from pplca.log_utils import log_debug, log_info, log_warning, log_error, log_exception
from pplca.config import (
    const_log_format,
    const_verbose,
    const_cert_dir,
    const_data_dir,
    const_mpc_program_dir,
    const_mpc_program,
    const_upload_dir,
)


def copy_companies_to_test_directory(compList, envFlowList, main_dir, test_path):
    # ready-to-use companies' paths
    comp_Path = main_dir + "/companies"
    for i in compList:
        # First copy the folder and files of the company
        shutil.copytree((comp_Path + "/" + i), (test_path + "/" + i))
        testComPath = str(test_path + "/" + i)
        generate_random_values_for_envflows(testComPath, envFlowList)
    log_debug(
        "Copied needed number("
        + str(len(compList))
        + ") of companies from "
        + comp_Path
    )


def give_name_to_companies(numComp):

    compArray = []
    for i in range(numComp):
        compArray.append("P" + str(i + 1))

    return compArray
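
# Illustration (not part of the original code):
#   give_name_to_companies(3) == ["P1", "P2", "P3"]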


# Ideal full tree
def ideal_subranges(nodes, deg):
    result = []
    (d, r) = divmod(len(nodes), deg)
    cnt = 0
    for i in range(deg):
        if i < r:
            result.append(nodes[cnt : cnt + d + 1])
            cnt += d + 1
        else:
            result.append(nodes[cnt : cnt + d])
            cnt += d
    return result
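
# A minimal sketch of what ideal_subranges computes (hypothetical data):
# splitting seven suppliers among deg=2 children gives near-equal parts,
# with the remainder spread over the leading subranges:
#   ideal_subranges(["P2", "P3", "P4", "P5", "P6", "P7", "P8"], 2)
#   == [["P2", "P3", "P4", "P5"], ["P6", "P7", "P8"]]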


# Fill from right
def first_right_fill_subranges(nodes, deg):
    result = []
    cnt = 0
    numNodes = len(nodes)
    for i in range(deg):
        if i < (deg - 1):
            result.append(nodes[cnt : cnt + 1])
            cnt += 1
        else:
            result.append(nodes[cnt:numNodes])
            cnt += numNodes
    return result
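
# A minimal sketch (same hypothetical data): every child except the last
# gets a single supplier; the last child receives everything that is left:
#   first_right_fill_subranges(["P2", "P3", "P4", "P5", "P6", "P7", "P8"], 2)
#   == [["P2"], ["P3", "P4", "P5", "P6", "P7", "P8"]]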


def mk_supply_chain_structure(nodes, deg, treeStructure):
    head, *tail = nodes
    if len(tail) <= deg:
        return {head: tail}
    else:
        if treeStructure == 0:
            x = ideal_subranges(tail, deg)
        elif treeStructure == 1:
            x = first_right_fill_subranges(tail, deg)
        else:
            log_error(
                "There is no identified structure with this argument parameter: "
                + str(treeStructure)
            )
            sys.exit(2)
        return {
            head: list(
                map(lambda I: mk_supply_chain_structure(I, deg, treeStructure), x)
            )
        }
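
# Sketch of the resulting structure (hypothetical data): seven companies,
# two suppliers per node, balanced tree (treeStructure=0):
#   mk_supply_chain_structure(["P1", "P2", "P3", "P4", "P5", "P6", "P7"], 2, 0)
#   == {"P1": [{"P2": ["P3", "P4"]}, {"P5": ["P6", "P7"]}]}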


def mk_config_file(rootCA, computationArray, rootdir):
    "Make the configuration file used for the setup step in SCALE-MAMBA"

    numofComp = len(computationArray)

    config = {"Start": {"Certs": {"IPAdd": [], "CertName": []}}}
    config["Start"]["Set-up"] = "4"
    config["Start"]["RootCAname"] = rootCA
    config["Start"]["NumberofPlayers"] = str(numofComp)
    for item in computationArray:
        player = item + "_" + rootCA + ".crt"
        config["Start"]["Certs"]["IPAdd"].append("127.0.0.1")
        config["Start"]["Certs"]["CertName"].append(player)

    # They do not ask the user anymore whether fakeOffline or fakeSacrifice
    # config['Start']['FakeOffline'] = fp.readline().replace("\n", "")
    # config['Start']['FakeSacrifice'] = fp.readline().replace("\n", "")
    # which secret sharing scheme (in our case it is Shamir Secret Sharing)
    config["Start"]["LSSS"] = "1"
    config["Start"]["Modulus"] = "340282366920938463463374607431768211507"

    # Shamir threshold: strictly less than half of the number of players
    if (numofComp % 2) == 0:
        config["Start"]["threshold"] = str(int(numofComp / 2) - 1)
    else:
        config["Start"]["threshold"] = str(int(numofComp / 2))
    try:
        with open(rootdir + "/config.json", "w") as f:
            json.dump(config, f)

        log_debug(
            "Config file is generated for setting up the computation (for the Data folder) - "
            + rootCA
            + " root company (in directory "
            + rootdir
            + ")"
        )
    except:
        log_exception(
            "Setup configuration file " + rootdir + "/config.json was NOT created!"
        )
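
# For illustration (hypothetical values), a config.json produced for three
# players P1..P3 with root CA "P1" would look roughly like:
#   {"Start": {"Certs": {"IPAdd": ["127.0.0.1", "127.0.0.1", "127.0.0.1"],
#                        "CertName": ["P1_P1.crt", "P2_P1.crt", "P3_P1.crt"]},
#              "Set-up": "4", "RootCAname": "P1", "NumberofPlayers": "3",
#              "LSSS": "1", "Modulus": "3402...1507", "threshold": "1"}}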


def mk_MPC_config_file(scaleVector, envFlowList, rootdir):

    MPC_config = {"ScaleVector": [], "NameofEnvFlows": [], "NumofEnvFlows": []}

    for value in scaleVector:
        MPC_config["ScaleVector"].append(str(value))

    for flow in envFlowList:
        MPC_config["NameofEnvFlows"].append(flow)

    MPC_config["NumofEnvFlows"].append(str(len(envFlowList)))

    try:
        with open(rootdir + "/MPC.json", "w") as MPCfile:
            json.dump(MPC_config, MPCfile)

        log_debug(
            "Scale vector and environmental flow list used in MPC saved in "
            + rootdir
            + "/MPC.json"
        )
    except:
        log_exception("MPC file (MPC.json) was NOT created! - " + rootdir)


def generate_random_values_for_envflows(compDir, envFlowList):

    # Generate initial values of env flows
    envFlowDict = {"EnvFlows": {"NameofEnvFlow": [], "ValueofEnvFlow": []}}

    for i in envFlowList:
        envFlowDict["EnvFlows"]["NameofEnvFlow"].append(i)
        envFlowDict["EnvFlows"]["ValueofEnvFlow"].append(str(random.randint(5, 10)))

    with open(compDir + "/env-flows.json", "w") as f:
        json.dump(envFlowDict, f)
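
# Example env-flows.json written for a company (values drawn uniformly
# from [5, 10]; shown values are hypothetical):
#   {"EnvFlows": {"NameofEnvFlow": ["cd", "sd"],
#                 "ValueofEnvFlow": ["6", "9"]}}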


def multi_process_computation(i, CERT, port, maindir, supplier_dir_name):
    comp_supp_dir = maindir + "/" + i + "/" + supplier_dir_name
    comp_dir = maindir + "/" + i
    log_debug(
        "Company "
        + i
        + " ("
        + comp_supp_dir
        + ") joins the computation with the "
        + CERT
        + " certificate."
    )

    try:
        start_computation(
            i, supplier_dir_name, comp_supp_dir, comp_dir, CERT, str(port), False
        )
        log_info(
            "Inside of multiprocessing -- cert "
            + CERT
            + ", company "
            + i
            + ", and port number "
            + str(port)
        )
        log_debug("And create output in " + comp_supp_dir + "/" + i + ".out")
    except:
        log_exception("Could NOT run MPC computation!!!!")


def existing_scenario_computation(
    i, CERT, serverPort, port, maindir, supplier_dir_name
):
    comp_supp_dir = maindir + "/" + i + "/" + supplier_dir_name
    comp_dir = maindir + "/" + i
    log_debug(
        "Company "
        + i
        + " ("
        + comp_supp_dir
        + ") joins the computation with the "
        + CERT
        + " certificate."
    )

    com_id = get_computation_id(serverPort, CERT)
    log_debug("Company " + i + " - id is " + str(com_id))
    try:
        run_computation(i, comp_supp_dir, comp_dir, com_id, str(port), False)
        log_info(
            "Inside of multiprocessing -- cert "
            + CERT
            + ", company "
            + i
            + ", and port number "
            + str(port)
        )
        log_debug("And create output in " + comp_supp_dir + "/" + i + ".out")
    except:
        log_exception("Could NOT run MPC computation!!!!")


def get_list_of_comp_env_flows_value(mainDir, computationArray):

    envFlowsList = []

    for i in computationArray:
        compDir = mainDir + "/" + i
        with open(compDir + "/env-flows.json", "r") as readfile:
            envFlValues = json.load(readfile)
        envFlowsList.append(envFlValues["EnvFlows"]["ValueofEnvFlow"])

    return envFlowsList


def update_validation_file(
    maindir,
    level,
    computationArray,
    envFlowValueList,
    rootCA,
    serverPort,
    computationPort,
):

    if os.path.exists(maindir + "/" + rootCA + "/Root/MPC.json"):
        with open(maindir + "/" + rootCA + "/Root/MPC.json") as MPCconfigFile:
            MPCdata = json.load(MPCconfigFile)

        with open(maindir + "/" + rootCA + "/env-flows.json", "r") as rootfile:
            rootenvflowfile = json.load(rootfile)

        with open(maindir + "/computation_validation_info.json", "r") as readfile:
            feeds = json.load(readfile)

        dt_string = datetime.today().isoformat()

        feeds["one_level_computations"].append(
            {
                "level_info": level,
                "server_port": serverPort,
                "computation_port": computationPort,
                "companies": str(computationArray),
                "scale vector": MPCdata["ScaleVector"],
                "list_of_env_flows_values_of_companies": envFlowValueList,
                "computation_result": rootenvflowfile["EnvFlows"]["ValueofEnvFlow"],
                "The time of computation": dt_string,
            }
        )

        with open(maindir + "/computation_validation_info.json", "w") as outfile1:
            json.dump(feeds, outfile1)
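
# For illustration, each appended validation record looks roughly like
# (values hypothetical; computation_result reflects the root company's
# env-flows.json, which is presumably updated by the MPC run itself):
#   {"level_info": 1, "server_port": 5099, "computation_port": 5100,
#    "companies": "['P1', 'P2', 'P3']", "scale vector": ["1", "7", "9"],
#    "list_of_env_flows_values_of_companies": [["6", "9"], ["5", "8"], ["7", "7"]],
#    "computation_result": ["96", "121"], "The time of computation": "..."}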


def run_each_compilation_parallel(
    computationArray, scaleVector, serverPort, rootComp, maindir, envFlowList, level
):

    if len(computationArray) > 2:
        log_info(
            "One level computation will happen between "
            + str(computationArray)
            + " \n And their values in the scale vector are "
            + str(scaleVector)
            + " \n And the server port is: "
            + str(serverPort)
        )
        rootCA = rootComp
        rootdir = maindir + "/" + rootCA + "/Root"
        log_debug("RootCA directory of the computation is " + rootdir)

        # generate the config file with the Setup instructions
        mk_config_file(rootCA, computationArray, rootdir)

        try:
            log_info(
                "Setup is started - "
                + rootdir
                + "."
                + " Please wait... Setup can take several minutes (estimated time 10-15 minutes). "
                + "It can take even longer if there are parallel operations!"
            )
            setup_start_time = time.perf_counter()
            settingupComputation(rootComp, rootdir)
            log_info("Setup is completed, files are saved in " + rootdir + "/Data")
            setup_finish_time = time.perf_counter()
            setupTime = round(setup_finish_time - setup_start_time, 2)

        except:
            log_exception("Setup is NOT completed! - " + rootdir)

        # generate the MPC config file for the MPC configuration
        mk_MPC_config_file(scaleVector, envFlowList, rootdir)

        try:
            log_info(
                "Compilation of the MPC file is started. - "
                + rootdir
                + " Please wait... It takes time to compile... (estimated time 5-10 minutes)."
            )
            mpc_start_time = time.perf_counter()
            createMPCFile(rootdir)
            log_info(
                "MPC file is compiled and ready to be used - "
                + rootdir
                + "/Programs/SuppChainAgg"
            )
            mpc_finish_time = time.perf_counter()
            mpcTime = round(mpc_finish_time - mpc_start_time, 2)

        except:
            log_exception("MPC file is NOT compiled! - " + rootdir)

        with open(maindir + "/single_setup_runtime_info.json", "r") as readsetupfile:
            setupfeeds = json.load(readsetupfile)

        with open(maindir + "/single_test_scenario_runtime_info.json", "r") as readfile:
            feeds = json.load(readfile)

        dt_string = datetime.today().isoformat()

        tempjson = {
            "level_info": level,
            "companies": str(computationArray),
            "setup_time(seconds)": setupTime,
            "MPC_file_compilation_time(seconds)": mpcTime,
            "The time of computation": dt_string,
        }

        feeds["one_level_computations"].append(tempjson)
        setupfeeds["one_level_computations"].append(tempjson)

        with open(maindir + "/single_test_scenario_runtime_info.json", "w") as outfile1:
            json.dump(feeds, outfile1)

        with open(maindir + "/single_setup_runtime_info.json", "w") as outsetupfile:
            json.dump(setupfeeds, outsetupfile)


def setup_and_MPC_compiling_parallel(
    level, subtree, rootComp, envFlowList, currentPort, maindir
):

    scaleVector = []
    log_debug("RootComp " + rootComp + " and subtree " + str(subtree))
    # parties involved in a single computation
    computationArray = []
    # The root company is also a party involved in that computation, so we
    # give it a scale vector value; we use 1, treating it as the unit value.
    computationArray.append(rootComp)
    scaleVector.append(1)

    rootCompNumber = int(rootComp[1:])

    level += 1
    serverPort = currentPort + rootCompNumber * 100
    # If it has suppliers
    if type(subtree) == list:
        with concurrent.futures.ProcessPoolExecutor() as executor:
            rootCompPort = serverPort
            for supplier in subtree:
                # a dict means the supplier has sub-levels of its own
                if type(supplier) == dict:
                    for i in supplier:
                        computationArray.append(i)
                        scaleVector.append(random.randrange(5, 10))
                        subtreeLen = len(supplier[i])
                        rootCompPort = rootCompPort + (2 + subtreeLen)
                        log_debug(
                            "Level "
                            + str(level)
                            + ", Supplier "
                            + str(supplier)
                            + ", Subtree length "
                            + str(subtreeLen)
                            + ", Root Comp Number "
                            + str(rootCompNumber)
                            + ", Server port: "
                            + str(rootCompPort)
                        )
                        log_debug(
                            "The time (recursive) before the executor is "
                            + str(time.perf_counter())
                        )
                        # run the same-level computations in parallel
                        p = executor.submit(
                            setup_and_MPC_compiling_parallel,
                            level,
                            supplier[i],
                            i,
                            envFlowList,
                            rootCompPort,
                            maindir,
                        )
                        log_debug(
                            "The time (recursive) after the executor is "
                            + str(time.perf_counter())
                        )
                # otherwise it is in the last level (a leaf)
                else:
                    computationArray.append(supplier)
                    scaleVector.append(random.randrange(5, 10))

            level -= 1
            log_debug(
                "The time (running setup) before the executor is "
                + str(time.perf_counter())
            )
            runparallel = executor.submit(
                run_each_compilation_parallel,
                computationArray,
                scaleVector,
                serverPort,
                rootComp,
                maindir,
                envFlowList,
                level,
            )
            log_debug(
                "The time (running setup) after the executor is "
                + str(time.perf_counter())
            )
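
# Port allocation sketch (derived from the code above, values illustrative):
# with currentPort = 4999, the computation rooted at P1 listens on
# 4999 + 1 * 100 = 5099, and each nested sub-computation is offset further
# by (2 + length of its subtree) so that concurrently running servers do
# not collide.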


def run_one_level_computations_recursively(
    existing_status_of_test_scenario,
    level,
    subtree,
    rootComp,
    envFlowList,
    currentPort,
    maindir,
):

    log_debug("RootComp " + rootComp + " and subtree " + str(subtree))
    # parties involved in a single computation
    computationArray = []
    # The root company is also a party involved in that computation.
    computationArray.append(rootComp)

    rootCompNumber = int(rootComp[1:])

    level += 1
    serverPort = currentPort + rootCompNumber * 100
    # If it has suppliers
    if type(subtree) == list:
        with concurrent.futures.ProcessPoolExecutor() as executor:
            rootCompPort = serverPort
            for supplier in subtree:
                # a dict means the supplier has sub-levels of its own
                if type(supplier) == dict:
                    for i in supplier:
                        computationArray.append(i)
                        subtreeLen = len(supplier[i])
                        rootCompPort = rootCompPort + (2 + subtreeLen)
                        log_debug(
                            "Level "
                            + str(level)
                            + ", Supplier "
                            + str(supplier)
                            + ", Subtree length "
                            + str(subtreeLen)
                            + ", Root Comp Number "
                            + str(rootCompNumber)
                            + ", Server port: "
                            + str(rootCompPort)
                        )
                        # run the same-level computations in parallel
                        p = executor.submit(
                            run_one_level_computations_recursively,
                            existing_status_of_test_scenario,
                            level,
                            supplier[i],
                            i,
                            envFlowList,
                            rootCompPort,
                            maindir,
                        )

                # otherwise it is in the last level (a leaf)
                else:
                    computationArray.append(supplier)
        level -= 1

    ###### After all threads (downstream computations) have finished,
    # we can continue with the remaining steps.

    if len(computationArray) > 2:
        log_info(
            "One level computation will happen between "
            + str(computationArray)
            + " \n And the server port is: "
            + str(serverPort)
        )
        rootCA = rootComp
        rootdir = maindir + "/" + rootCA + "/Root"
        log_debug("RootCA directory of the computation is " + rootdir)

        # get the env flows' value list of the companies for the current computation
        envFlowValueList = get_list_of_comp_env_flows_value(
            maindir, computationArray
        )

        try:
            # also give the port number for the server
            shell_cmd3 = (
                "python3 "
                + rootdir
                + "/server.py -c RunningServer -p "
                + str(serverPort)
                + " > "
                + rootdir
                + "/server.log 2>&1 && echo '$!' > ser.pid"
            )
            child3 = pexpect.spawn(
                "/bin/bash", ["-c", shell_cmd3], echo=False, timeout=None
            )
            log_info("Server (" + str(serverPort) + ") is running... - " + rootdir)
            with open(rootdir + "/ser.pid", "w") as server:
                server.write("$!")
            time.sleep(2)
        except:
            log_exception("Server is NOT running! - " + rootdir)

        if existing_status_of_test_scenario == False:
            try:
                # This part should use threading too; here we also need to
                # give the port numbers for the suppliers.
                # Calculate the running time of one one-level computation.
                player_start_time = time.perf_counter()
                with concurrent.futures.ProcessPoolExecutor() as executor:
                    # each player needs the same port number (we pass the start port)
                    tempPort = serverPort + 1
                    log_info(
                        "One level MPC Computation is started! \n Main company of the computation is "
                        + rootCA
                    )
                    for i in range(len(computationArray)):
                        supplier = computationArray[i]
                        supplier_dir = supplier + "_" + rootCA
                        CERT = supplier + "_" + rootCA + ".crt"
                        log_info(
                            supplier
                            + " player joins the computation with the "
                            + CERT
                            + " certificate"
                        )
                        log_debug("Root directory for this computation " + rootdir)
                        p = executor.submit(
                            multi_process_computation,
                            supplier,
                            CERT,
                            tempPort,
                            maindir,
                            supplier_dir,
                        )
                log_info("MPC Computation is completed! - " + rootCA)
                player_finish_time = time.perf_counter()
                onelevelrunningTime = round(
                    player_finish_time - player_start_time, 2
                )
            except:
                log_exception(
                    "Multiprocessing has problems! PlayerBinary.x is NOT completed!"
                )

        else:
            try:
                player_start_time = time.perf_counter()
                with concurrent.futures.ProcessPoolExecutor() as executor:
                    # each player needs the same port number (we pass the start port)
                    tempPort = serverPort + 1
                    log_info(
                        "One level MPC Computation is started! \n Main company of the computation is "
                        + rootCA
                    )
                    for i in range(len(computationArray)):
                        supplier = computationArray[i]
                        supplier_dir = supplier + "_" + rootCA
                        CERT = supplier + "_" + rootCA + ".crt"
                        log_info(
                            supplier
                            + " player joins the computation with the "
                            + CERT
                            + " certificate"
                        )
                        log_debug("Root directory for this computation " + rootdir)
                        p = executor.submit(
                            existing_scenario_computation,
                            supplier,
                            CERT,
                            serverPort,
                            tempPort,
                            maindir,
                            supplier_dir,
                        )
                log_info("MPC Computation is completed! - " + rootCA)
                player_finish_time = time.perf_counter()
                onelevelrunningTime = round(
                    player_finish_time - player_start_time, 2
                )
            except:
                log_exception(
                    "Multiprocessing has problems! PlayerBinary.x is NOT completed!"
                )

        with open(
            maindir + "/single_test_scenario_runtime_info.json", "r"
        ) as readfile:
            feeds = json.load(readfile)

        dt_string = datetime.today().isoformat()

        feeds["one_level_computations"].append(
            {
                "level_info": level,
                "companies": str(computationArray),
                "running_time_of_computation(seconds)": onelevelrunningTime,
                "The time of computation": dt_string,
            }
        )

        with open(
            maindir + "/single_test_scenario_runtime_info.json", "w"
        ) as outfile1:
            json.dump(feeds, outfile1)

        update_validation_file(
            maindir,
            level,
            computationArray,
            envFlowValueList,
            rootCA,
            serverPort,
            tempPort,
        )

    elif len(computationArray) == 1:
        log_debug("It is just a supplier company, no sub-suppliers " + str(subtree))
    else:
        log_error(
            "You are trying to run a scenario that we do NOT support! "
            + str(subtree)
        )
        sys.exit(2)

    log_info("Supply Chain Computation is COMPLETED!")


def argument_func(*args):

    main_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description="Server Service module.",
    )
    main_parser.add_argument(
        "-ts",
        "--treeStructure",
        type=int,
        default=0,
        help="give the tree structure to the system:\n"
        + "0 - if you would like to have a balanced tree\n"
        + "1 - if you would like to have a right-filled tree",
    )
    main_conf = main_parser.parse_args(args)

    return main_conf
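
# Example invocation (illustration): run the benchmarks with a right-filled
# supply chain tree:
#   ./generate_and_run_test_scenario.py -ts 1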
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
|
||||||
|
logger = logging.getLogger()
|
||||||
|
ch = logging.StreamHandler(sys.stderr)
|
||||||
|
ch.setFormatter(
|
||||||
|
logging.Formatter(const_log_format("generate_and_run_test_scenario.py"))
|
||||||
|
)
|
||||||
|
logger.addHandler(ch)
|
||||||
|
if const_verbose():
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
else:
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
treeSt = argument_func(*sys.argv[1:])
|
||||||
|
if (treeSt.treeStructure != 1) and (treeSt.treeStructure != 0):
|
||||||
|
log_error(
|
||||||
|
"There is no identified structure with this argument parameter: "
|
||||||
|
+ str(treeSt.treeStructure)
|
||||||
|
)
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
|
# starting port
|
||||||
|
currentPort = 4999
|
||||||
|
|
||||||
|
# main_dir = os.path.realpath(__file__)
|
||||||
|
main_dir = os.getcwd()
|
||||||
|
|
||||||
|
if os.path.isfile("test_scenarios_runtime_info.json"):
|
||||||
|
with open("test_scenarios_runtime_info.json", "r") as readfile:
|
||||||
|
allTestScenarios = json.load(readfile)
|
||||||
|
else:
|
||||||
|
allTestScenarios = {"TestScenarios": []}
|
||||||
|
|
||||||
|
# Environmental Flow List wanted in computations
|
||||||
|
environmentalFlowList = ["cd", "sd"]
|
||||||
|
|
||||||
|
# Test scenarios - how many parties involved and how many supplier parties in each layer
|
||||||
|
# test_scenarios_dict={"test1":[3,2],"test2":[7,2],"test2":[5,2],"test7":[7,2],"test9":[7,6]}
|
||||||
|
test_scenarios_dict = {"test_3_2_t2": [3, 2]}
|
||||||
|
|
||||||
|
# Run each test scenario
|
||||||
|
for i in test_scenarios_dict:
|
||||||
|
# Create directory for test scenario
|
||||||
|
test_sce_path = main_dir + "/" + i
|
||||||
|
|
||||||
|
companies_num = test_scenarios_dict[i][0]
|
||||||
|
supplier_num = test_scenarios_dict[i][1]
|
||||||
|
companies = give_name_to_companies(companies_num)
|
||||||
|
|
||||||
|
# if os.path.isdir(test_sce_path):
|
||||||
|
# shutil.rmtree(test_sce_path)
|
||||||
|
# #print("Test scenario folder - "+test_sce_path+ " is deleted") # delete
|
||||||
|
# log_debug("Old test scenario folder - "+test_sce_path+ " is deleted")
|
||||||
|
existing_status_of_test_scenario = False
|
||||||
|
if os.path.isdir(test_sce_path):
|
||||||
|
existing_status_of_test_scenario = True
|
||||||
|
for i in companies:
|
||||||
|
testComPath = test_sce_path + "/" + i
|
||||||
|
generate_random_values_for_envflows(testComPath, environmentalFlowList)
|
||||||
|
|
||||||
|
else:
|
||||||
|
os.mkdir(test_sce_path)
|
||||||
|
log_debug("Create directory for " + i + " test scenario")
|
||||||
|
|
||||||
|
# copies companies from companies directory to test directory
|
||||||
# we also need the env flow list because we pass this info by default
copy_companies_to_test_directory(
    companies, environmentalFlowList, main_dir, test_sce_path
)

if os.path.isfile(
    test_sce_path + "/computations_results_of_test_scenario.json"
):
    with open(
        test_sce_path + "/computations_results_of_test_scenario.json", "r"
    ) as readfile:
        TestScenarioComputationResult = json.load(readfile)
else:
    TestScenarioComputationResult = {"ComputationResultsofTestScenario": []}

computationValidation = {
    "one_level_computations": [],
    "whole_test_scenario": {},
}
with open(
    test_sce_path + "/computation_validation_info.json", "w"
) as com_output:
    json.dump(computationValidation, com_output)

# testing SetupBinary Step
singleSetupRuntimes = {"one_level_computations": []}
with open(test_sce_path + "/single_setup_runtime_info.json", "w") as outfile:
    json.dump(singleSetupRuntimes, outfile)

# Test scenario runtime information
singleTestScenarioConfig = {
    "one_level_computations": [],
    "whole_test_scenario": {},
}
with open(
    test_sce_path + "/single_test_scenario_runtime_info.json", "w"
) as outfile1:
    json.dump(singleTestScenarioConfig, outfile1)

log_info(
    "Start to run test scenario "
    + i
    + " for "
    + str(companies_num)
    + " companies - "
    + str(supplier_num)
    + " suppliers model"
)
# generate the supply chain model
h = mk_supply_chain_structure(companies, supplier_num, treeSt.treeStructure)
log_info(
    "Supply chain structure is generated for ("
    + str(companies_num)
    + "-"
    + str(supplier_num)
    + "): \n"
    + str(h)
)

root = ""
for company in h:
    root = company  # main company
log_info("Main company of the supply chain is " + root)
start = time.perf_counter()
level = 1
# run setup and MPC compilation in parallel
set_and_compile = 0
if not existing_status_of_test_scenario:
    set_and_compile = 1
    setup_and_MPC_compiling_parallel(
        level, h[root], root, environmentalFlowList, currentPort, test_sce_path
    )
    log_info("Needs to set up and compile MPC file!")
else:
    log_info(
        "The test scenario exists! No need for setup and compilation. MPC computation is started!"
    )

run_one_level_computations_recursively(
    existing_status_of_test_scenario,
    level,
    h[root],
    root,
    environmentalFlowList,
    currentPort,
    test_sce_path,
)
finish = time.perf_counter()
runningTime = round(finish - start, 2)
print("Finished in " + str(runningTime) + " second(s)")

with open(test_sce_path + "/computation_validation_info.json", "r") as compfile:
    compload = json.load(compfile)

with open(
    test_sce_path + "/single_test_scenario_runtime_info.json", "r"
) as readfile:
    feeds = json.load(readfile)
singleTest = {}
singleTest["name_of_test_scenario"] = test_sce_path
singleTest["supply_chain_structure"] = h
singleTest["number_of_parties_in_the_supply_chain"] = companies_num
singleTest["number_of_suppliers_in_each_computation"] = supplier_num
singleTest["number_of_environmental_flows"] = len(environmentalFlowList)
singleTest["name_of_environmental_flows"] = environmentalFlowList
singleTest["running_time_of_scenario(seconds)"] = runningTime

compload["whole_test_scenario"] = singleTest
feeds["whole_test_scenario"] = singleTest

with open(
    test_sce_path + "/computation_validation_info.json", "w"
) as compoutput:
    json.dump(compload, compoutput)
with open(
    test_sce_path + "/single_test_scenario_runtime_info.json", "w"
) as outfile1:
    json.dump(feeds, outfile1)

compload["setup_and_mpc_compilation"] = set_and_compile
compload["observing_running_times"] = feeds
TestScenarioComputationResult["ComputationResultsofTestScenario"].append(
    compload
)

feeds["setup_and_mpc_compilation"] = set_and_compile
allTestScenarios["TestScenarios"].append(feeds)

with open(
    test_sce_path + "/computations_results_of_test_scenario.json", "w"
) as compout:
    json.dump(TestScenarioComputationResult, compout)

with open("test_scenarios_runtime_info.json", "w") as outfile:
    json.dump(allTestScenarios, outfile)
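# Sketch of the resulting test_scenarios_runtime_info.json, inferred from the
# keys used above (values are placeholders, not actual measurements):
#
#   {"TestScenarios": [
#       {"one_level_computations": [...],
#        "whole_test_scenario": {"name_of_test_scenario": "...",
#                                "number_of_parties_in_the_supply_chain": 10,
#                                "running_time_of_scenario(seconds)": 42.0},
#        "setup_and_mpc_compilation": 1}]}
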
@ -0,0 +1,6 @@
Werkzeug==0.16.1
httplib2==0.14.0
pexpect==4.8.0
requests==2.22.0
Flask==1.1.1
pyOpenSSL==19.1.0

@ -0,0 +1,51 @@
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

numEnvFl = $numEnvFl

enFlValues = Array(numEnvFl, sint)

enFlName = [$enFlName]
scaleVector = Array($companyNumber, sint)

def GetFlows():

    companyNumber = $companyNumber

    for n in range(companyNumber):
        # gets the values of the scale vector as secret input from player 0
        scaleVector[n] = sint.get_private_input_from(0)

    for i in range(companyNumber):
        for j in range(numEnvFl):
            if_then(enFlValues[j].reveal() != 0)
            enFlValues[j] += (sint.get_private_input_from(i) * scaleVector[i])
            else_then()
            enFlValues[j] = (sint.get_private_input_from(i) * scaleVector[i])
            end_if()

GetFlows()
for k in range(numEnvFl):
    enFlValues[k].reveal_to(0)
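# Illustrative only: after the server substitutes the $-placeholders via
# string.Template (see createMPCFile in the server component), the generated
# SuppChainAgg.mpc starts along these lines (the flow names are made up):
#
#   numEnvFl = 2
#   enFlValues = Array(numEnvFl, sint)
#   enFlName = ['CO2', 'CH4']
#   scaleVector = Array(3, sint)
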
@ -0,0 +1,53 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""
Onboarding and offboarding of suppliers.
"""
import sys

import argparse
import logging

from pplca.onboarding import createInitialFolders, joinSupplyChain
from pplca.config import const_log_format

if __name__ == "__main__":

    logging.getLogger("requests").setLevel(logging.WARNING)
    logger = logging.getLogger()
    ch = logging.StreamHandler(sys.stdout)
    ch.setFormatter(logging.Formatter(const_log_format("boarding.py")))
    logger.addHandler(ch)

    p = argparse.ArgumentParser()
    subparsers = p.add_subparsers()

    option1_parser = subparsers.add_parser(
        "setInitialSteps", help="Create main folders"
    )
    option1_parser.set_defaults(func=createInitialFolders)

    option2_parser = subparsers.add_parser(
        "joinSupplyChain", help="Joining Supply Chain"
    )
    option2_parser.set_defaults(func=joinSupplyChain)

    args = p.parse_args()
    args.func(args)
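# Example invocations (illustrative):
#   python3 boarding.py setInitialSteps
#   python3 boarding.py joinSupplyChain
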
@ -0,0 +1,132 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""
Computation component.
"""
import sys
import argparse
import os
import logging
from pplca.config import const_log_format, const_verbose
from pplca.compute import update_env_flows_list, start_computation
from pplca.log_utils import log_info


def log_conf(cwd, conf):
    """Log configuration."""
    log_info("Configuration:")
    log_info(" Base dir: {}".format(cwd))
    log_info(" Command: {}".format(conf.cmd))


def main(conf):
    """Main function: Computation Module."""
    logger = logging.getLogger()
    ch = logging.StreamHandler(sys.stdout)
    ch.setFormatter(logging.Formatter(const_log_format("computation.py")))
    logger.addHandler(ch)
    # cwd = os.getcwd()
    if conf.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
    log_conf(conf.compDir, conf)

    retval = False
    if conf.cmd == "update":
        retval = update_env_flows_list(conf.compDir, conf.compSuppDir)
    elif conf.cmd == "computation":
        retval = start_computation(
            conf.companyName,
            conf.suppDirName,
            conf.compSuppDir,
            conf.compDir,
            conf.certificate,
            conf.portNumber,
            conf.envFlowValue,
        )

    return retval


if __name__ == "__main__":
    main_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter, description="Computation module."
    )
    main_parser.add_argument(
        "-c",
        "--cmd",
        metavar="cmd",
        choices=["computation", "update"],
        default="computation",
        help="Command:\n"
        + "  computation: start computation\n"
        + "  update: Update the list of environmental flows",
    )
    main_parser.add_argument(
        "-p",
        "--portNumber",
        type=int,
        default=0,
        help="give port number for supplier",
    )
    main_parser.add_argument(
        "-env",
        "--envFlowValue",
        # note: argparse's type=bool treats any non-empty string as True
        type=bool,
        default=True,
        help="give the values of environmental flows for computation",
    )
    main_parser.add_argument(
        "-cert", "--certificate", default="", help="give certificate that will be used"
    )
    main_parser.add_argument(
        "-cn",
        "--companyName",
        default="",
        help="give company(root) name that will be used",
    )
    main_parser.add_argument(
        "-comsuppd",
        "--compSuppDir",
        default="",
        help="give the directory of the supplier company",
    )
    main_parser.add_argument(
        "-suppname",
        "--suppDirName",
        default="",
        help="give the name of the directory of the supplier company",
    )
    main_parser.add_argument(
        "-comd",
        "--compDir",
        default="",
        help="give the directory of the company",
    )
    main_parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=const_verbose(),
        help="increase verbosity",
    )
    main_conf = main_parser.parse_args()
    sys.exit(main(main_conf))
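# Example invocations (illustrative; paths and names are placeholders):
#   python3 computation.py -c update -comd ./CompanyA -comsuppd ./CompanyA/SupplierB
#   python3 computation.py -c computation -cn RootCompany -suppname SupplierB \
#       -comd ./CompanyA -comsuppd ./CompanyA/SupplierB -cert SupplierB.crt -p 5000
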
@ -0,0 +1,336 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""Basic computation module."""

from io import BytesIO
import shutil
import logging
import zipfile
import os
import json
import base64
import pexpect
import requests
from OpenSSL import crypto
from pplca.config import (
    const_default_host,
    const_specified_host,
    const_cert_dir,
    const_data_dir,
    const_mpc_program_dir,
    const_envflows_dir,
    const_mpc_program,
)
from pplca.log_utils import log_debug, log_info


def get_certificate_list(portNumber):
    """Get the list of signed certificates"""
    # url = const_default_host() + "certificatesdb/certlist/"
    url = const_specified_host(portNumber) + "certificatesdb/certlist/"
    return requests.get(url).content


def get_computation_id(portNumber, certificate):
    """Get computation ID."""
    # url = const_default_host() + "computationId/"
    url = const_specified_host(portNumber) + "computationId/"
    return int(requests.get(url + str(certificate)).content)


def get_all_certificates(portNumber, cwd, certlist):
    """Obtain list of all certificates."""

    # url = const_default_host() + "cert/"
    url = const_specified_host(portNumber) + "cert/"
    crt = const_cert_dir(cwd)

    jsonlist = json.loads(certlist.decode("utf-8"))

    log_debug("List of Certificates ")
    for j in jsonlist["certs"]["CertNames"]:

        response = requests.get(url + j)
        try:
            signed_client_certificate = crypto.load_certificate(
                crypto.FILETYPE_PEM, response.content
            )
        except crypto.Error:
            log_debug(
                "The signed certificate is not loaded!!! Response: " + str(response)
            )
            continue

        certpath = crt + j
        try:
            with open(certpath, "wb") as cert_store:
                cert_store.write(
                    crypto.dump_certificate(
                        crypto.FILETYPE_PEM, signed_client_certificate
                    )
                )

            log_debug("CRT Stored Here :" + certpath)

        except IOError:
            log_debug("Certificate " + j + " is not saved in " + crt)

    log_info("Get the signed certificates used in computation and save in " + crt)


def get_computation_file(portNumber, cwd):
    """Get the compiled MPC files."""
    # url = const_default_host() + "computationfile"
    url = const_specified_host(portNumber) + "computationfile"

    response = requests.get(url)
    comdir = const_mpc_program_dir(cwd) + "/" + const_mpc_program()
    try:
        with zipfile.ZipFile(
            BytesIO(base64.b64decode(response.content)), "r"
        ) as zip_ref:
            zip_ref.extractall(comdir)
            log_debug("Get the MPC computation folder " + comdir)
    except Exception as exc:
        log_debug("MPC folder is not a zip file: " + str(exc))


def get_env_flows_list_file(portNumber, compdir):
    """Get content of Env Flows File."""
    url = const_specified_host(portNumber) + "envflowslistfile"

    response = requests.get(url)
    envflowdir = const_envflows_dir(compdir)

    try:
        with zipfile.ZipFile(
            BytesIO(base64.b64decode(response.content)), "r"
        ) as zip_ref:
            zip_ref.extractall(envflowdir)
            log_debug("Get environmental flows list file to " + envflowdir)
    except Exception as exc:
        log_debug("Environmental flows list file is not a zip file: " + str(exc))


def get_data_files(portNumber, cwd, com_id):
    """Get content of Data folder."""
    # url = const_default_host() + "datafolder/"
    url = const_specified_host(portNumber) + "datafolder/"

    response = requests.get(url + str(com_id))
    try:
        with zipfile.ZipFile(
            BytesIO(base64.b64decode(response.content)), "r"
        ) as zip_ref:
            zip_ref.extractall(cwd + "/Data")
            log_debug("Get files of Data folder and save in " + cwd + "/Data")

    except Exception as exc:
        log_debug("Files of Data folder are not a zip file: " + str(exc))


def mk_env_flows_and_values_file(compDir):
    """Ask for the initial values of the environmental flows and store them."""
    # Generate initial values of env flows
    envFlowDict = {"EnvFlows": {"NameofEnvFlow": [], "ValueofEnvFlow": []}}

    with open(compDir + "/envflowslist.json") as envflo:
        envFlowsList = json.load(envflo)

    for i in envFlowsList["NameofEnvFlow"]:
        envFlowDict["EnvFlows"]["NameofEnvFlow"].append(i)
        flowValue = input(
            "How many/much {} is emitted/produced for one unit product ? : ".format(
                i
            )
        )
        envFlowDict["EnvFlows"]["ValueofEnvFlow"].append(str(flowValue))

    with open(compDir + "/env-flows.json", "w") as f:
        json.dump(envFlowDict, f)


def run_computation(comName, cwd, compdir, com_id, portNumber, envFlowValue):
    """Run the actual computation."""
    env_flow_value_list = []
    data = {}
    if envFlowValue:
        mk_env_flows_and_values_file(compdir)
    try:
        with open(compdir + "/env-flows.json") as json_file:
            data = json.load(json_file)
    except IOError:
        log_debug("Cannot find env-flows.json in " + compdir)

    if os.path.exists(compdir + "/Root/MPC.json"):
        with open(compdir + "/Root/MPC.json") as MPCconfigFile:
            MPCdata = json.load(MPCconfigFile)

    for j in range(len(data["EnvFlows"]["ValueofEnvFlow"])):
        env_flow_value_list.append(data["EnvFlows"]["ValueofEnvFlow"][j])
        print(data["EnvFlows"]["ValueofEnvFlow"][j])

    try:
        log_debug("Start to Podman Container for Player " + str(comName))

        shell_cmd = (
            "podman run --cidfile "
            + cwd
            + "/playercontainerId --pod mypod -it --volume "
            + cwd
            + "/Cert-Store:/opt/src/SCALE-MAMBA/Cert-Store --volume "
            + cwd
            + "/Data:/opt/src/SCALE-MAMBA/Data --volume "
            + cwd
            + "/Programs:/opt/src/SCALE-MAMBA/Programs -w /opt/src/SCALE-MAMBA/ localhost/scale-mamba-latest PlayerBinary.x -pnb "
            + str(portNumber)
            + " "
            + str(com_id)
            + " Programs/"
            + const_mpc_program()
        )

        child = pexpect.spawn("/bin/bash", ["-c", shell_cmd], echo=False, timeout=None)

        if com_id == 0:
            # player 0 feeds the scale vector first
            for j, _ in enumerate(MPCdata["ScaleVector"]):
                child.expect("Input channel")
                child.sendline(MPCdata["ScaleVector"][j])

        for i, _ in enumerate(env_flow_value_list):
            child.expect("Input channel")
            child.sendline(str(env_flow_value_list[i]))
            # log_debug("!!! Waiting for input - ", str(env_flow_value_list[i]))

        # read the remaining output once; a second read() would return nothing
        output = child.read().decode("UTF-8")
        with open(cwd + "/output.out", "w") as output_file:
            output_file.write(output)

        print(output)

    except Exception:
        print("MPC computation is NOT completed - compute.py - " + cwd)

    try:
        con_rm_cmd = (
            "podman rm --cidfile "
            + cwd
            + "/playercontainerId && rm "
            + cwd
            + "/playercontainerId"
        )
        os.system(con_rm_cmd)
        log_debug("Player's container is removed! - " + cwd)
    except Exception:
        log_debug("Player's container could NOT be removed! - " + cwd)

    update_env_flows_list(compdir, cwd)
    log_info(
        "The result (updated environmental flows) is saved in "
        + compdir
        + "/env-flows.json file."
    )


def get_list_of_env_flow(cwd):
    """Parse the names of the environmental flows out of the MPC program."""
    word_list = []

    with open(
        const_mpc_program_dir(cwd) + "/" + const_mpc_program() + ".mpc"
    ) as mpcfile:
        line = mpcfile.readline()
        while line:
            if "enFlName" in line:
                # every second piece of the split is a quoted flow name
                words = line.rsplit("'")
                for i in range(len(words) - 1):
                    if i % 2 == 0:
                        print(words[i + 1])
                        word_list.append(words[i + 1])
                break
            line = mpcfile.readline()
    return word_list


##############
def update_env_flows_list(compdir, compSuppDir):
    """Update env-flows.json with the values revealed by the computation."""
    with open(compdir + "/env-flows.json") as envflo:
        envFlowsList = json.load(envflo)

    count = 0
    file_path = compSuppDir + "/output.out"
    log_debug("Checking for: " + file_path)
    if os.path.exists(file_path):
        with open(file_path) as computation_result:
            line = computation_result.readline()
            log_debug("Found and looking for the values of Environmental Flows")
            while line:
                if "Output channel 0" in line:
                    array = line.split(" ")
                    arrlen = len(array)
                    value = array[arrlen - 1].replace("\n", "")
                    envFlowsList["EnvFlows"]["ValueofEnvFlow"][count] = str(value)
                    count = count + 1
                line = computation_result.readline()
    else:
        log_debug("The file " + file_path + " is not found!")
    if count != 0:
        with open(compdir + "/env-flows.json", "w") as json_file:
            json.dump(envFlowsList, json_file)
        log_info(
            "The result of computation (new value list of environmental flows) : "
            + str(envFlowsList["EnvFlows"]["ValueofEnvFlow"])
        )
        log_info(
            "The values of Environmental Flows are updated and saved in "
            + compdir
            + "/env-flows.json"
        )
    log_debug(
        "The result of computation (new value list of environmental flows) : "
        + str(envFlowsList["EnvFlows"]["ValueofEnvFlow"])
    )


def generate_needed_folders_and_files(playername, cwd, compdir):
    """Create the folders a player needs and copy its private key."""
    os.makedirs(cwd + "/Data")
    os.makedirs(cwd + "/Programs/SuppChainAgg")
    os.makedirs(cwd + "/Cert-Store")
    original = compdir + "/Root/Cert-Store/" + playername + ".key"
    target = cwd + "/Cert-Store/" + playername + ".key"
    shutil.copy(original, target, follow_symlinks=False)


##############
def start_computation(
    comName, suppdirname, cwd, compdir, certificate, portNum, envFlowValue
):
    generate_needed_folders_and_files(suppdirname, cwd, compdir)
    portNumber = str(int(portNum) - 1)
    log_info("Get all necessary files from server...")
    certlist = get_certificate_list(portNumber)
    get_all_certificates(portNumber, cwd, certlist)
    get_computation_file(portNumber, cwd)
    if envFlowValue:
        get_env_flows_list_file(portNumber, compdir)
    com_id = get_computation_id(portNumber, certificate)
    log_debug("Computation id of the company is " + str(com_id))
    get_data_files(portNumber, cwd, com_id)
    log_info("Ready for computation...")
    run_computation(comName, cwd, compdir, com_id, portNum, envFlowValue)
    log_info("Computation is completed.")
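# Sketch of the env-flows.json layout read and updated by this module,
# inferred from mk_env_flows_and_values_file() above (values illustrative):
#
#   {"EnvFlows": {"NameofEnvFlow": ["CO2", "CH4"],
#                 "ValueofEnvFlow": ["12.5", "0.3"]}}
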
@ -0,0 +1,71 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""Global configuration."""


def const_default_host(path=""):
    """Default host URL."""
    return "http://127.0.0.1:4999/" + path


def const_specified_host(server, path=""):
    """Host URL for the given server port."""
    return "http://127.0.0.1:" + str(server) + "/" + path


def const_verbose():
    """Default verbosity."""
    return True


def const_log_format(ext_id="-" * 16):
    """Log format string tagged with a component name."""
    return "%(process)6s %(asctime)s %(levelname)8s {:<16} %(message)s".format(ext_id)


def const_cert_dir(basedir="."):
    """Certificate store."""
    return basedir + "/Cert-Store/"


def const_data_dir(basedir="."):
    """Data store."""
    return basedir + "/Data/"


def const_mpc_program():
    """Name of the MPC program."""
    return "SuppChainAgg"


def const_mpc_program_dir(basedir="."):
    """MPC program directory."""
    return basedir + "/Programs"


def const_mpc_template_dir(basedir="."):
    """MPC template directory."""
    return basedir + "/Program-Templates"


def const_upload_dir(basedir="."):
    """Upload directory."""
    return basedir + "/UploadedFolder"


def const_envflows_dir(basedir="."):
    """Environmental flows directory."""
    return basedir
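# Illustrative: const_log_format("boarding.py") expands to the format string
#   "%(process)6s %(asctime)s %(levelname)8s boarding.py      %(message)s"
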
@ -0,0 +1,76 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""Various log utilities."""

import traceback
import logging
import sys

from pplca.config import const_log_format


def value_of(value, default):
    """Get value or default value if None."""
    if value is not None and value != "":
        return value
    else:
        return default


def log_debug(msg, indent_level=0):
    logging.debug(4 * indent_level * " " + str(msg))


def log_info(msg, indent_level=0):
    logging.info(4 * indent_level * " " + str(msg))


def log_warning(msg, indent_level=0):
    logging.warning(4 * indent_level * " " + str(msg))


def log_error(msg, indent_level=0):
    logging.error(4 * indent_level * " " + str(msg))


def log_exception(msg, indent_level=0):
    logging.error(4 * indent_level * " " + str(msg))
    for line in traceback.format_exc().splitlines():
        logging.error(4 * indent_level * " " + line)


def set_logger_tag(ext_id):
    logger = logging.getLogger()
    for handler in logger.handlers:
        handler.setFormatter(logging.Formatter(const_log_format(ext_id)))


def setup_logger(verbose):
    if verbose:
        loglevel = logging.INFO
    else:
        loglevel = logging.WARNING

    logger = logging.getLogger()
    logger.setLevel(loglevel)
    ch = logging.StreamHandler(sys.stdout)
    logger.addHandler(ch)

    set_logger_tag("-" * 16)
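# Typical use (illustrative):
#   setup_logger(verbose=True)
#   set_logger_tag("compute.py")
#   log_info("computation started", indent_level=1)
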
@ -0,0 +1,197 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""
Onboarding and offboarding module.
"""

import sys
from OpenSSL import crypto
import os
import logging
import requests
import json
from pplca.config import (
    const_cert_dir,
    const_default_host,
    const_mpc_program_dir,
    const_mpc_program,
    const_log_format,
)
from pplca.log_utils import log_debug, log_info


def joinSupplyChain(args):
    cwd = os.getcwd()
    url = const_default_host("comdb/company")
    fileName = makeConfigFile()
    with open(fileName) as json_file:
        dat = json.load(json_file)
    log_info("You should introduce yourself to the system to join the supply chain.")

    data = {"name": dat["comName"]}
    log_info("Trying to join supply chain: " + dat["comName"])
    response = requests.post(url, json=data)
    log_info("Response: " + response.text, 2)
    log_info("Generating keys", 2)
    generatekey(cwd, dat["comName"])
    log_info("Successfully joined supply chain: " + dat["comName"], 2)


###################
def makeConfigFile():
    config = {}

    config["comName"] = input("What is the company name: \n")
    # c = 'US'
    config["c"] = input("Country: \n")
    # st = 'California'
    config["st"] = input("State: \n")
    # l = 'Berkley'
    config["l"] = input("Locality: \n")
    # o = 'CQB'
    config["o"] = input("Organization: \n")
    # ou = 'Network Operations'
    config["ou"] = input("Organizational Unit: \n")
    config["email"] = input("email address: \n")

    fileName = config["comName"] + ".json"

    with open(fileName, "w") as f:
        json.dump(config, f)
    return fileName


def updateInfo(url):

    comId = input("What is your company id: \n")

    comName = input("What is the new name of the company?: \n")

    data = {"name": comName}

    response = requests.post(url + comId, json=data)
    log_info(response)


####################
def leaveSupplyChain(url):

    comId = input("What is your company id: \n")
    response = requests.delete(url + comId)
    log_info(response)


def generatekey(cwd, certname):
    """Generate Client Private key"""
    # Pull these out of scope

    key = crypto.PKey()
    keypath = const_cert_dir(cwd) + "/" + certname + ".key"
    csrpath = const_cert_dir(cwd) + "/" + certname + ".csr"
    crtpath = const_cert_dir(cwd) + "/" + certname + ".crt"

    if os.path.exists(keypath):
        log_info("Certificate file exists, aborting.")
        log_info(keypath)
        sys.exit(1)
    # Else write the key to the keyfile
    else:
        log_info("Generating Key Please standby")
        key.generate_key(crypto.TYPE_RSA, 2048)
        with open(keypath, "wb") as f:
            f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))

    generatecsr(key, certname, csrpath, keypath, crtpath)


def generatecsr(key, csrname, csrpath, keypath, crtpath):
    """Generate Client Certificate CSR - Public key"""
    with open(csrname + ".json") as json_file:
        dat = json.load(json_file)
    # c = 'US'
    # c= input("Country: ")
    # st = 'California'
    # st= input("State: ")
    # l = 'Berkley'
    # l= input("Locality: ")
    # o = 'CQB'
    # o= input("Organization: ")
    # ou = 'Network Operations'
    # ou= input("Organizational Unit: ")
    # email=input("email address:")

    req = crypto.X509Req()
    req.get_subject().C = dat["c"]
    req.get_subject().ST = dat["st"]
    req.get_subject().L = dat["l"]
    req.get_subject().O = dat["o"]
    req.get_subject().OU = dat["ou"]
    req.get_subject().CN = dat["comName"]
    req.get_subject().emailAddress = dat["email"]
    req.set_pubkey(key)
    req.sign(key, "sha256")

    if os.path.exists(csrpath):
        log_info("Certificate File Exists, aborting.")
        log_info(csrpath)
    else:
        with open(csrpath, "wb") as f:
            f.write(crypto.dump_certificate_request(crypto.FILETYPE_PEM, req))
        log_info("Success")
        log_info("Key Stored Here :" + keypath)
        log_info("CSR Stored Here :" + csrpath)

    requestForSigningCertificate(csrpath)
    getSignedCertificate(dat["comName"], crtpath)


def requestForSigningCertificate(csrpath):
    """Send signing request to the server"""
    url = const_default_host("file")
    fin = open(csrpath, "rb")
    files = {"file": fin}
    try:
        requests.post(url, files=files)
    finally:
        fin.close()


def getSignedCertificate(csrname, crtpath):
    """Just the specific certificate - own certificate."""
    url = const_default_host("cert/")

    response = requests.get(url + csrname + ".crt")
    signedclientCert = crypto.load_certificate(crypto.FILETYPE_PEM, response.content)

    with open(crtpath, "wb") as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, signedclientCert))
    log_info("CRT Stored Here :" + crtpath)


def createInitialFolders(args):
    """Create folder hierarchy."""
    # invoked via boarding.py as args.func(args)
    cwd = os.getcwd()
    os.makedirs(const_mpc_program_dir(cwd) + "/" + const_mpc_program(), exist_ok=True)
    os.makedirs(const_cert_dir(cwd), exist_ok=True)

    log_info("Creation of directories is done!")
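# Sketch of the <comName>.json file written by makeConfigFile() (all values
# are placeholders typed in by the user):
#
#   {"comName": "CompanyA", "c": "UK", "st": "Devon", "l": "Exeter",
#    "o": "CompanyA Ltd", "ou": "Operations", "email": "ops@example.com"}
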
@ -0,0 +1,982 @@
#!/usr/bin/env python3
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

"""
Server component.
"""
import argparse
import time
import tempfile
import logging
import getopt
from datetime import datetime
import json
import os
import pexpect
import zipfile
import base64
import sys
from OpenSSL import crypto
from flask import (
    Flask,
    abort,
    jsonify,
    request,
    make_response,
    render_template,
    redirect,
    url_for,
    session,
    send_from_directory,
)
from werkzeug.utils import secure_filename
from string import Template

sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
print(sys.path)
from pplca.config import (
    const_log_format,
    const_verbose,
    const_cert_dir,
    const_data_dir,
    const_mpc_program_dir,
    const_mpc_program,
    const_envflows_dir,
    const_upload_dir,
)
from pplca.log_utils import log_info, log_debug, log_warning, log_exception

# cwd = os.getcwd()
cwd = os.path.dirname(os.path.realpath(__file__))
ALLOWED_EXTENSIONS = set(["py", "crt", "mpc", "csr"])

app = Flask(__name__)

comDB = [
    {
        "id": -1,
        "name": "RootCompany",
        # 'password':'Com1',
        # 'serialnumber':'1001'
        # Actually right now we have a different computation running command
        "computation running command": "./PlayerBinary.x your_id Program",
    }
]

certificatesDB = [
    {
        "serial_number": 1000,
        "cert_name": "RootCA.crt",
        "uploaded_time": "2019/10/14 17:45:25",
    }
]
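# Illustrative REST calls against the endpoints defined below (the default
# port 4999 comes from const_default_host; the company name is a placeholder):
#   curl http://127.0.0.1:4999/comdb/company
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"name": "CompanyA"}' http://127.0.0.1:4999/comdb/company
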
@app.route("/")
def home():
    if not session.get("logged_in"):
        return render_template("login.html")
    else:
        return render_template("index.html")


@app.route("/comdb/company", methods=["GET"])
def getAllCom():
    return jsonify({"comps": comDB})


@app.route("/certificatesdb/certlist/", methods=["GET"])
def sendCertList():

    """Send List of Signed Certificates to the client"""
    certlist = {"CertNames": []}

    try:
        with open(cwd + "/config.json") as json_file:
            data = json.load(json_file)
    except IOError:
        log_debug("(Sending Cert List) - Config file could not be found in " + cwd)

    certlist["CertNames"].append(data["Start"]["RootCAname"] + ".crt")

    for name in data["Start"]["Certs"]["CertName"]:
        certlist["CertNames"].append(name)

    return jsonify({"certs": certlist})


@app.route("/comdb/company/<int:comId>", methods=["GET"])
def getCom(comId):

    usr = [com for com in comDB if com["id"] == comId]

    return jsonify({"com": usr})


@app.route("/computationId/<comName>", methods=["GET"])
def getID(comName):
    # cwd=os.getcwd()
    cwd = os.path.dirname(os.path.realpath(__file__))
    try:
        with open(cwd + "/config.json") as json_file:
            data = json.load(json_file)
    except IOError:
        log_debug("Config file could not be found in " + cwd)

    counter = 0
    for cert in data["Start"]["Certs"]["CertName"]:

        if cert == comName:
            log_debug("Company id " + str(counter) + " is sent to Company " + comName)
            return str(counter)
        else:
            counter += 1
    log_debug("Certificate " + comName + " could not be found.")
    return "Could not find the certificate"


@app.route("/comdb/company/<int:comId>", methods=["PUT"])
def updateCom(comId):

    company = [com for com in comDB if com["id"] == comId]

    if "name" in request.json:
        company[0]["name"] = request.json["name"]

    log_info(
        "Company with id "
        + str(comId)
        + " has changed the company name to "
        + str(company[0]["name"])
    )
    log_debug(
        "Company with id "
        + str(comId)
        + " has changed the company name to "
        + str(company[0]["name"])
    )
    return jsonify({"com": company[0]})


@app.route("/comdb/company", methods=["POST"])
def createCom():

    if not request.json or "name" not in request.json:
        log_debug("It is not a json request or it does not have the company name.")
        abort(400)

    data = request.json

    for com in comDB:
        if data["name"] == com["name"]:
            return "This name exists."

    dat = {
        "id": comDB[-1]["id"] + 1,
        "name": request.json["name"],
        # we have a different computation running command right now (docker)
        "computation running command": "./PlayerBinary.x your_id Program",
    }

    comDB.append(dat)

    log_info("New company " + str(data["name"]) + " is added to the computation.")
    log_debug("Company " + str(data["name"]) + " is added to the computation.")
    return jsonify(dat)


@app.route("/comdb/company/<int:comId>", methods=["DELETE"])
def deleteCom(comId):

    company = [com for com in comDB if com["id"] == comId]

    if len(company) == 0:
        abort(404)

    comDB.remove(company[0])

    log_debug(
        "Company " + str(company[0]["name"]) + " is deleted from the computation group"
    )
    log_info("Company " + str(company[0]["name"]) + " left the computation group")
    return jsonify({"response": "Success"})


@app.errorhandler(404)
def not_found(error):

    return make_response(jsonify({"error": "not found"}), 404)


def allowed_file(filename):
    return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS


@app.route("/file", methods=["GET", "POST"])
def upload_file():
    """Link in order to upload a file"""
    # cwd=os.getcwd()
    cwd = os.path.dirname(os.path.realpath(__file__))
    if request.method == "POST":

        # check if the post request has the file part
        if "file" not in request.files:
            log_debug("File part could not be found in the request!!!")
            return redirect(request.url)
        file = request.files["file"]
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == "":
            log_debug("No selected file. Empty!!!")
            return redirect(request.url)

        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(const_upload_dir(cwd), filename))

            return redirect(url_for("uploaded_file", filename=filename))

    return """
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload new File</h1>
    <form method=post enctype=multipart/form-data>
      <input type=file name=file>
      <input type=submit value=Upload>
    </form>
    """


@app.route("/uploads/<filename>")
def uploaded_file(filename):
    """Link for the uploaded files"""
    # cwd=os.getcwd()
    cwd = os.path.dirname(os.path.realpath(__file__))
    name, ext = os.path.splitext(filename)
    log_debug(
        "Uploaded file " + filename + ", file name " + name + " and extension " + ext
    )
    splitdata = name.split("_")

    if ext == ".csr":
        try:
            with open(const_upload_dir(cwd) + "/" + filename, "rb") as my_cert_file:
                my_cert_text = my_cert_file.read()
                log_debug(str(my_cert_file) + " is read")
                clientcert = crypto.load_certificate_request(
                    crypto.FILETYPE_PEM, my_cert_text
                )
                log_debug("Sign client certificate " + str(clientcert))

        except IOError:
            log_debug(
                "Certificate "
                + filename
                + " could not be found in "
                + const_upload_dir(cwd)
            )

        newfile = signCertificates(clientcert, splitdata[1])
        newname = name + ".crt"
        crtpath = const_cert_dir(cwd) + newname
        log_debug("Storing CRT Here :" + crtpath)
        with open(crtpath, "wb") as f:
            f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, newfile))
        dat = {
            "serial_number": newfile.get_serial_number(),
            "cert_name": newname,
            "uploaded_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        }
        certificatesDB.append(dat)

    return send_from_directory(const_upload_dir(cwd), filename)


def signCertificates(clientcert, name):
    """Sign Clients Certificates"""
    # cwd=os.getcwd()
    cwd = os.path.dirname(os.path.realpath(__file__))
    with open(const_cert_dir(cwd) + name + ".crt", "rb") as my_cert_file:
        my_cert_text = my_cert_file.read()
    readablecert = crypto.load_certificate(crypto.FILETYPE_PEM, my_cert_text)

    with open(const_cert_dir(cwd) + name + ".key", "rb") as keyfile:
        mykey = keyfile.read()

    readableprivatekey = crypto.load_privatekey(crypto.FILETYPE_PEM, mykey)

    CAcert = crypto.X509()
    log_info(certificatesDB[-1]["serial_number"] + 1)
    CAcert.set_serial_number(certificatesDB[-1]["serial_number"] + 1)
    CAcert.gmtime_adj_notBefore(0)
    CAcert.gmtime_adj_notAfter(5 * 365 * 24 * 60 * 60)
    CAcert.set_issuer(readablecert.get_subject())
    CAcert.set_subject(clientcert.get_subject())
    CAcert.set_pubkey(clientcert.get_pubkey())
    CAcert.sign(readableprivatekey, "sha256")

    log_info("Certificate of supplier company " + name + " is signed.")
    return CAcert
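# Inferred signing flow (a sketch, not normative): a supplier POSTs a
# <name>_<rootCA>.csr to /file, uploaded_file() above loads the CSR and calls
# signCertificates() with the CA name taken from the filename part after "_",
# and the signed certificate is fetched back via GET /cert/<name>_<rootCA>.crt.
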
@app.route("/cert/<filename>", methods=["GET"])
|
||||||
|
def sendBackSignedCertificate(filename):
|
||||||
|
# cwd=os.getcwd()
|
||||||
|
cwd = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
"""Send the certificate to the client"""
|
||||||
|
path = const_cert_dir(cwd) + filename
|
||||||
|
|
||||||
|
with tempfile.NamedTemporaryFile() as tmp:
|
||||||
|
fileN = tmp.name
|
||||||
|
|
||||||
|
with open(fileN, "w") as writefile:
|
||||||
|
try:
|
||||||
|
with open(path, "rb") as my_cert_file:
|
||||||
|
writefile = my_cert_file.read()
|
||||||
|
|
||||||
|
except IOError:
|
||||||
|
log_debug(path + " could not found!")
|
||||||
|
|
||||||
|
log_debug(filename + " signed certificate is sent to the supplier company back.")
|
||||||
|
return writefile
|
||||||
|
|
||||||
|
|
||||||
|
def generateSelfSignedCert(cwd):
|
||||||
|
"""Self Signed Certificate"""
|
||||||
|
certname = input("RootCA name: \n")
|
||||||
|
k = crypto.PKey()
|
||||||
|
k.generate_key(crypto.TYPE_RSA, 4096)
|
||||||
|
CERT_FILE = certname + ".crt"
|
||||||
|
KEY_FILE = certname + ".key"
|
||||||
|
cert = crypto.X509()
|
||||||
|
|
||||||
|
if not (
|
||||||
|
os.path.exists(const_cert_dir(cwd) + CERT_FILE)
|
||||||
|
or (os.path.exists(const_cert_dir(cwd) + KEY_FILE))
|
||||||
|
):
|
||||||
|
|
||||||
|
# create a self-signed cert
|
||||||
|
cert.get_subject().C = input("Country: \n")
|
||||||
|
cert.get_subject().ST = input("State: \n")
|
||||||
|
cert.get_subject().L = input("Locality: \n")
|
||||||
|
cert.get_subject().O = input("Organization: \n")
|
||||||
|
cert.get_subject().OU = input("Organizational Unit: \n")
|
||||||
|
cert.get_subject().CN = certname
|
||||||
|
cert.gmtime_adj_notBefore(0)
|
||||||
|
cert.gmtime_adj_notAfter(5 * 365 * 24 * 60 * 60)
|
||||||
|
cert.set_issuer(cert.get_subject())
|
||||||
|
cert.set_pubkey(k)
|
||||||
|
cert.sign(k, "sha256")
|
||||||
|
|
||||||
|
log_debug("Creating Root CA for " + certname)
|
||||||
|
open(const_cert_dir(cwd) + CERT_FILE, "wb").write(
|
||||||
|
crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
|
||||||
|
)
|
||||||
|
open(const_cert_dir(cwd) + KEY_FILE, "wb").write(
|
||||||
|
crypto.dump_privatekey(crypto.FILETYPE_PEM, k)
|
||||||
|
)
|
||||||
|
log_info("Name of Root Certificate and key are " + certname)
|
||||||
|
|
||||||
|
else:
|
||||||
|
log_warning(
|
||||||
|
certname
|
||||||
|
+ "certificate-key pair is exists in directory "
|
||||||
|
+ const_cert_dir(cwd)
|
||||||
|
+ "!!"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def makeMPCconfigFile(rootdir):
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(rootdir + "/config.json") as json_file:
|
||||||
|
data = json.load(json_file)
|
||||||
|
|
||||||
|
except IOError:
|
||||||
|
log_debug("Could not find config.json file in directory " + rootdir)
|
||||||
|
|
||||||
|
comNumber = int(data["Start"]["NumberofPlayers"])
|
||||||
|
# input("How many companies will join the computation?(count yourself
|
||||||
|
# too) : ")
|
||||||
|
|
||||||
|
MPC_config = {"ScaleVector": [], "NameofEnvFlows": [], "NumofEnvFlows": []}
|
||||||
|
|
||||||
|
for i in range(comNumber):
|
||||||
|
if i == 0:
|
||||||
|
finalDemand = input(
|
||||||
|
"How many/much the product is produced by {} company? : ".format(
|
||||||
|
data["Start"]["Certs"]["CertName"][i]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
MPC_config["ScaleVector"].append(str(finalDemand))
|
||||||
|
else:
|
||||||
|
value = input(
|
||||||
|
"How many/much the product is needed from {} company? ".format(
|
||||||
|
data["Start"]["Certs"]["CertName"][i]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
MPC_config["ScaleVector"].append(str(value))
|
||||||
|
|
||||||
|
numEnvFlows = str(
|
||||||
|
input("How many environmental flows the system will have in the computation?: ")
|
||||||
|
)
|
||||||
|
MPC_config["NumofEnvFlows"].append(numEnvFlows)
|
||||||
|
|
||||||
|
for j in numEnvFlows:
|
||||||
|
envflow = input("Name of environmental flow")
|
||||||
|
MPC_config["NameofEnvFlows"].append(envflow)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(rootdir + "MPC.json", "w") as MPCfile:
|
||||||
|
json.dump(MPC_config, MPCfile)
|
||||||
|
|
||||||
|
log_debug(
|
||||||
|
"Scale vector and environmental flow list used in MPC saved in directory "
|
||||||
|
+ rootdir
|
||||||
|
+ "/MPC.json"
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
log_exception("MPC file (MPC.json) is NOT created! - " + rootdir)
|
||||||
|
|
||||||
|
|
||||||
|
def makeEnvFlowsListFile(enFlName, rootdir):
|
||||||
|
|
||||||
|
envFlowDict = {"NameofEnvFlow": []}
|
||||||
|
|
||||||
|
for i in enFlName:
|
||||||
|
envFlowDict["NameofEnvFlow"].append(i)
|
||||||
|
|
||||||
|
with open(rootdir + "/envflowslist.json", "w") as f:
|
||||||
|
json.dump(envFlowDict, f)
|
||||||
|
|
||||||
|
|
||||||
|
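# Sketch of the MPC.json written by makeMPCconfigFile() above (values are
# placeholders entered interactively):
#
#   {"ScaleVector": ["10", "2", "3"],
#    "NameofEnvFlows": ["CO2", "CH4"],
#    "NumofEnvFlows": ["2"]}
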


def createMPCFile(rootdir):
    """Rewriting the mpc file"""

    try:
        with open(rootdir + "/config.json") as json_file:
            data = json.load(json_file)

    except IOError:
        log_debug("Could not find config.json file in directory " + rootdir)

    if os.path.exists(rootdir + "/MPC.json"):
        log_debug("MPC Config file exists! - " + rootdir)

    else:
        makeMPCconfigFile(rootdir)

    try:
        with open(rootdir + "/MPC.json") as MPCconfigFile:
            MPCdata = json.load(MPCconfigFile)
    except IOError:
        log_debug("Could not find MPC.json file in " + rootdir)

    newfile = (
        const_mpc_program_dir(rootdir)
        + "/"
        + const_mpc_program()
        + "/"
        + const_mpc_program()
        + ".mpc"
    )
    try:
        f = open(newfile, "wt")

        comNumber = int(data["Start"]["NumberofPlayers"])
        # input("How many companies will join the computation?(count yourself
        # too) : ")

        """
        scaleVector = ""
        for i in range(comNumber):
            # main company will be the first company in computation
            if i == 0:
                # finalDemand = input("How many/much the product is produced by
                # {} company? : ".format(data['Start']['Certs']['CertName'][i]))
                finalDemand = MPCdata["ScaleVector"][i]
                scaleVector += str(finalDemand) + ","
            elif i == (comNumber - 1):
                # value = input("How many/much the product is needed from {}
                # company? ".format(data['Start']['Certs']['CertName'][i]))
                value = MPCdata["ScaleVector"][i]
                scaleVector += str(value)
            else:
                # value=input("How many/much the product is needed from {}
                # company? ".format(data['Start']['Certs']['CertName'][i]))
                value = MPCdata["ScaleVector"][i]
                scaleVector += str(value) + ","
        """

        # numEnvFl = input("How many environmental flows the system will have
        # in the computation?: ")
        numEnvFl = MPCdata["NumofEnvFlows"][0]
        enFlName = ""
        # all but the last flow name get a trailing comma; the last is appended below
        for i in range(int(numEnvFl) - 1):
            # enFlName+="'"+input("Environmental flow name :")+"',"
            enFlName += "'" + MPCdata["NameofEnvFlows"][i] + "',"
        # enFlName+="'"+input("Environmental flow name :")+"'"
        enFlName += "'" + MPCdata["NameofEnvFlows"][int(numEnvFl) - 1] + "'"

        # create Environmental Flows json File for players
        makeEnvFlowsListFile(MPCdata["NameofEnvFlows"], rootdir)

        supplychainfile = (
            rootdir + "/Program-Templates/SuppChainAgg/SuppChainAgg.mpc-template"
        )
        computationFile = open(supplychainfile)
        src = Template(computationFile.read())
        d = {
            "numEnvFl": numEnvFl,
            "enFlName": enFlName,
            "companyNumber": comNumber,
            # "scaleVector": scaleVector,
        }

        result = src.substitute(d)
        f.write(result)
        f.close()

        log_debug("MPC file is created in directory " + newfile)
    except Exception:
        log_exception("MPC file is NOT created - " + newfile)

    compileMPCFile(rootdir)
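

# A minimal sketch of the template mechanism used by createMPCFile above,
# assuming the .mpc-template file contains string.Template placeholders named
# after the keys of the dict "d" ($numEnvFl, $enFlName, $companyNumber):
def _example_template_substitution():
    """Sketch only (not called by the service)."""
    from string import Template

    tmpl = Template("flows = [$enFlName]  # $numEnvFl flows, $companyNumber players")
    return tmpl.substitute(numEnvFl=2, enFlName="'CO2','CH4'", companyNumber=3)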


def compileMPCFile(rootdir):
    """Compile the mpc file"""
    try:
        cmd = (
            "podman run --cidfile "
            + rootdir
            + "/mpccontainerId -it --volume "
            + rootdir
            + "/Data:/opt/src/SCALE-MAMBA/Data --volume "
            + rootdir
            + "/Programs:/opt/src/SCALE-MAMBA/Programs -w /opt/src/SCALE-MAMBA/ localhost/scale-mamba-latest compile-new.sh "
            + const_mpc_program_dir()
            + "/"
            + const_mpc_program()
        )

        print(cmd)
        with open("/tmp/cmd.txt", "w") as command:
            command.write(cmd)
        os.system(cmd)
        log_debug("MPC file is compiled.")
        log_info("MPC computation file SuppChainAgg.mpc is ready to be used.")
    except Exception:
        log_exception("MPC file could NOT be compiled!")

    try:
        con_rm_cmd = (
            "podman rm --cidfile "
            + rootdir
            + "/mpccontainerId && rm "
            + rootdir
            + "/mpccontainerId"
        )
        os.system(con_rm_cmd)
        log_debug("MPC's container is removed! - " + rootdir)
    except Exception:
        log_debug("MPC's container could NOT be removed! - " + rootdir)
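
# For reference, with rootdir = "/home/lh/rootco" (hypothetical), the compile
# command assembled above expands to roughly:
#   podman run --cidfile /home/lh/rootco/mpccontainerId -it \
#       --volume /home/lh/rootco/Data:/opt/src/SCALE-MAMBA/Data \
#       --volume /home/lh/rootco/Programs:/opt/src/SCALE-MAMBA/Programs \
#       -w /opt/src/SCALE-MAMBA/ localhost/scale-mamba-latest compile-new.sh \
#       <const_mpc_program_dir()>/<const_mpc_program()>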


def zipComputationFiles(path, ziph):
    """Zip and send the folder that contains computation files"""
    for root, dirs, files in os.walk(path):
        for f in files:
            ziph.write(os.path.join(root, f), f, zipfile.ZIP_DEFLATED)


@app.route("/computationfile", methods=["GET"])
def sendComputationFile():
    """send MPC computation file to suppliers"""
    progdir = const_mpc_program_dir(cwd) + "/" + const_mpc_program()
    with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp:
        zipname = tmp.name
    try:
        with zipfile.ZipFile(zipname, "w") as zipf:
            zipComputationFiles(progdir, zipf)
    except IOError:
        log_debug(progdir + " could not be found.")

    with open(zipname, "rb") as zipfolder:
        myzipfolder = base64.b64encode(zipfolder.read())

    log_debug("MPC computation folder is sent to the supplier")
    return myzipfolder
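

# A client would reverse the base64 encoding; a minimal sketch, assuming the
# "requests" library and the server running on localhost:4999:
def _example_fetch_computation_file(dest_zip):
    """Sketch only (not called by the service)."""
    import base64
    import requests

    resp = requests.get("http://localhost:4999/computationfile")
    with open(dest_zip, "wb") as out:
        out.write(base64.b64decode(resp.content))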


def zipEnvFile(path, ziph, envfile):
    """Zip the environmental flows list file"""
    ziph.write(os.path.join(path, envfile), envfile, zipfile.ZIP_DEFLATED)


@app.route("/envflowslistfile", methods=["GET"])
def sendEnvFlowsFile():
    """send Environmental Flows List file to suppliers"""
    cwd = os.path.dirname(os.path.realpath(__file__))
    # envdir = const_envflows_dir(cwd)

    envfile = "envflowslist.json"
    zipname = cwd + "/env-flows.zip"

    try:
        with zipfile.ZipFile(zipname, "w") as zipf:
            zipEnvFile(cwd, zipf, envfile)

    except IOError:
        log_debug(cwd + "/" + envfile + " could not be found.")
        print("Not found")

    with open(zipname, "rb") as zipfolder:
        myzipfolder = base64.b64encode(zipfolder.read())

    log_debug("Environmental Flows List file is sent to the supplier")
    return myzipfolder


def zipDataFolder(path, ziph, keyword):
    """Zip and send the Data folder(MAC key and other files)"""
    keyword2 = "MKey"
    keyword3 = keyword2 + "-" + str(keyword) + ".key"
    for root, dirs, files in os.walk(path):
        for file in files:
            if keyword2 in file:
                if keyword3 in file:
                    ziph.write(os.path.join(root, file), file, zipfile.ZIP_DEFLATED)
                    log_debug("MAC key of supplier is " + keyword3)
            else:
                ziph.write(os.path.join(root, file), file, zipfile.ZIP_DEFLATED)
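
# Naming convention assumed from the code above: the MAC key of player <comId>
# is stored as "MKey-<comId>.key" (e.g. "MKey-2.key" for player 2), so each
# supplier receives all non-key files plus only its own MAC key.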


@app.route("/datafolder/<comId>", methods=["GET"])
def sendDataFolder(comId):
    """send the files of Data folder to suppliers"""
    with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp:
        zipname = tmp.name

    try:
        with zipfile.ZipFile(zipname, "w") as zipf:
            zipDataFolder(const_data_dir(cwd), zipf, comId)
    except IOError:
        log_debug(const_data_dir(cwd) + " folder could not be found.")

    with open(zipname, "rb") as zipfolder:
        myzipfolder = base64.b64encode(zipfolder.read())

    log_debug("Data folder is sent to the supplier")
    return myzipfolder


def makeConfigFile(rootComp, rootdir):
    """Make Configuration file - is used for set up step in Scale mamba"""
    if os.path.exists(rootdir + "/config.json"):
        log_debug("Config file exists! - " + rootdir)

    else:
        config = {"Start": {"Certs": {"IPAdd": [], "CertName": []}}}

        config["Start"]["Set-up"] = str(
            input(
                "What do you want to set up? \n"
                + "1) Certs \n"
                + "2) Secret Sharing \n"
                + "3) Conversion circuit for LSSS<->GC computations \n"
                + "4) All three \n"
                + "Enter a number (1-4).. \n"
            )
        )
        config["Start"]["RootCAname"] = input("RootCA of the computation \n")
        numberofPlayers = int(input("Number of players \n"))
        config["Start"]["NumberofPlayers"] = str(numberofPlayers)
        for i in range(numberofPlayers):
            config["Start"]["Certs"]["IPAdd"].append(input("IP Address \n"))
            config["Start"]["Certs"]["CertName"].append(input("Which Certificate \n"))

        # They do not ask user anymore whether fakeOffline or fakeSacrifice
        # config['Start']['FakeOffline'] = fp.readline().replace("\n", "")
        # config['Start']['FakeSacrifice'] = fp.readline().replace("\n", "")

        # which secret sharing scheme (in our case it is Shamir Secret Sharing)
        config["Start"]["LSSS"] = "1"
        # If you want to use another LSSS, modify your config file following
        # the requirements in SCALE-MAMBA:
        # + "0) Full Threshold \n"
        # + "1) Shamir \n"
        # + "2) Replicated \n"
        # + "3) General Q2 MSP \n"

        config["Start"]["Modulus"] = str(
            input("What modulus do you want to use for secret sharing?")
        )
        config["Start"]["threshold"] = str(
            input("Enter threshold 0 < t < " + str(numberofPlayers / 2))
        )

        with open(rootdir + "/config.json", "w") as f:
            json.dump(config, f)

        log_debug(
            "Config file is generated for setting up computation (for Data folder)-"
            + rootComp
            + " Root company (in directory "
            + rootdir
            + ")"
        )
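
# Shape of the generated config.json (illustrative values; the Modulus must be
# a prime accepted by SCALE-MAMBA):
# {
#   "Start": {
#     "Certs": {"IPAdd": ["127.0.0.1", "..."], "CertName": ["Player1.crt", "..."]},
#     "Set-up": "4",
#     "RootCAname": "RootCA",
#     "NumberofPlayers": "3",
#     "LSSS": "1",
#     "Modulus": "<prime modulus>",
#     "threshold": "1"
#   }
# }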


def createInitialFolders(cwd):
    """Initial Folders"""
    os.makedirs(const_upload_dir(cwd), exist_ok=True)
    os.makedirs(const_mpc_program_dir(cwd) + "/" + const_mpc_program(), exist_ok=True)
    os.makedirs(const_cert_dir(cwd), exist_ok=True)
    os.makedirs(const_data_dir(cwd), exist_ok=True)

    log_debug("Creation of Initial folders is done.")


def setInitialSteps(cwd):
    createInitialFolders(cwd)
    generateSelfSignedCert(cwd)
    log_info("Root company is ready to be connected!")


def settingupComputation(rootCompany, rootdir):
    """For Setting up the Scale mamba SetupBinary.x"""

    if os.path.isfile(rootdir + "/timing_analysis_for_setup.json"):
        with open(rootdir + "/timing_analysis_for_setup.json", "r") as readfile:
            timingResults = json.load(readfile)
    else:
        timingResults = {"TimingAnalysis": []}

    singleTest = {}
    singleTest["1-Beginning-of-func"] = time.perf_counter()
    # log_debug("Timing start-beginning 1 !!!! " + str(time.perf_counter()))

    makeConfigFile(rootCompany, rootdir)
    log_info(
        "The system is setting up the configuration of the computation... This can take several minutes (estimated 10-15 minutes)."
    )
    try:
        with open(rootdir + "/config.json") as json_file:
            data = json.load(json_file)
    except IOError:
        log_debug("Could not find config.json file in " + rootdir)

    # we can put this check here, or directly when generating the config file
    if int(data["Start"]["NumberofPlayers"]) < 3:
        log_error(
            "You are trying to run a scenario that we do NOT support! Setup cannot run. This part of the computation has "
            + "fewer than 3 participants. Therefore, this computation cannot be kept confidential."
        )
        sys.exit(2)

    try:

        # log_debug("Timing start-shell command 2 !!!! " + str(time.perf_counter()))

        shell_cmd = (
            "podman run --cidfile "
            + rootdir
            + "/setupcontainerId -it --volume "
            + rootdir
            + "/Data:/opt/src/SCALE-MAMBA/Data --volume "
            + rootdir
            + "/Cert-Store:/opt/src/SCALE-MAMBA/Cert-Store -w /opt/src/SCALE-MAMBA/ localhost/scale-mamba-latest SetupBinary.x"
        )

        # log_debug("Timing start-shell 3 !!!! " + str(time.perf_counter()))
        singleTest["2-Before-run-cmd"] = time.perf_counter()
        child = pexpect.spawn("/bin/bash", ["-c", shell_cmd], echo=False)

        # log_debug("Timing start-inputs 4 !!!! " + str(time.perf_counter()))
        singleTest["3-After-run-cmd"] = time.perf_counter()

        child.expect("Enter", timeout=30)
        singleTest["4-expect-input-startsetup"] = time.perf_counter()
        child.sendline(str(data["Start"]["Set-up"]))
        # log_debug("Timing start-inputs 5 !!!! " + str(time.perf_counter()))
        child.expect("the root CA", timeout=30)
        singleTest["5-expect-input-rootCAname"] = time.perf_counter()
        child.sendline(str(data["Start"]["RootCAname"]))
        child.expect("Number of players", timeout=30)
        singleTest["6-expect-input-numofplayers"] = time.perf_counter()
        child.sendline(str(data["Start"]["NumberofPlayers"]))
        singleTest["7-expect-input-start-ipandcert"] = time.perf_counter()
        for i in range(int(data["Start"]["NumberofPlayers"])):
            child.expect(r"IP Address.*\n", timeout=300)
            child.sendline(str(data["Start"]["Certs"]["IPAdd"][i]))
            child.expect("Name of certificate", timeout=30)
            child.sendline((str(data["Start"]["Certs"]["CertName"][i])).rstrip())
        singleTest["8-expect-input-end-ipandcert"] = time.perf_counter()
        # SCALE-MAMBA not asking them anymore - but maybe in future they can...
        # child.expect('Fake offline')
        # child.sendline(data['Start']['FakeOffline'])
        # child.expect('Fake sacrifice')
        # child.sendline(data['Start']['FakeSacrifice'])
        child.expect("Enter a number")
        singleTest["9-expect-input-LSSS"] = time.perf_counter()
        child.sendline(data["Start"]["LSSS"])
        child.expect("secret sharing?")
        singleTest["10-expect-input-secretsharing"] = time.perf_counter()
        child.sendline(data["Start"]["Modulus"])
        child.expect("Enter threshold", timeout=5000)
        singleTest["11-expect-input-threshold"] = time.perf_counter()
        child.sendline(str(data["Start"]["threshold"]))
        child.wait()

        log_info("Setup for the computation is completed (Data Folder).")
        singleTest["12-expect-setupcompleted"] = time.perf_counter()

    except Exception:
        log_exception("Could NOT complete Setup!")
    try:
        singleTest["13-expect-input-before-delcontainer"] = time.perf_counter()
        # log_debug("Timing start-container 6 !!!! " + str(time.perf_counter()))
        con_rm_cmd = (
            "podman rm --cidfile "
            + rootdir
            + "/setupcontainerId && rm "
            + rootdir
            + "/setupcontainerId"
        )
        os.system(con_rm_cmd)
        log_debug("Setup's container is removed! - " + rootdir)
        singleTest["14-expect-input-after-delcontainer"] = time.perf_counter()
        # log_debug("Timing end-container 7 !!!! " + str(time.perf_counter()))
    except Exception:
        log_debug("Setup's container could NOT be removed! - " + rootdir)

    singleTest["15-end-of-func"] = time.perf_counter()
    # log_debug("Timing end 1 !!!! " + str(time.perf_counter()))

    timingResults["TimingAnalysis"].append(singleTest)

    with open(rootdir + "/timing_analysis_for_setup.json", "w") as outfile:
        json.dump(timingResults, outfile)
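
# Shape of timing_analysis_for_setup.json after two runs (illustrative values):
# {"TimingAnalysis": [
#     {"1-Beginning-of-func": 0.02, "...": "...", "15-end-of-func": 610.4},
#     {"1-Beginning-of-func": 0.03, "...": "..."}
# ]}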


def runServer(portNumber):
    log_info("Open server for clients")
    app.secret_key = os.urandom(12)
    app.run(port=portNumber)


def helpmsg():
    """Print help message."""
    print("server.py [OPTION]")
    print("    -h        print this help text")
    print("    -s        silent (no log messages)")


def main(conf):

    retval = False
    if conf.cmd == "InitializeSupplyChain":
        retval = setInitialSteps(conf.rootDir)
    elif conf.cmd == "Set-up":
        retval = settingupComputation(conf.rootCompany, conf.rootDir)
    elif conf.cmd == "MPCFile":
        retval = createMPCFile(conf.rootDir)
    elif conf.cmd == "RunningServer":
        retval = runServer(conf.portNumber)

    return retval


if __name__ == "__main__":

    verbose = True
    # NOTE: this getopt block appears to be legacy; the argparse parser below
    # performs the actual CLI parsing.
    try:
        opts, args = getopt.getopt(
            sys.argv, "hsed:a:o:w", ["date=", "archive=", "output="]
        )
    except getopt.GetoptError:
        helpmsg()
        sys.exit(2)
    for opt, arg in opts:
        if opt == "-h":
            helpmsg()
            sys.exit()
        elif opt == "-s":
            verbose = False

    if len(args) > 0:
        extid = args[0]
    else:
        helpmsg()
        sys.exit()

    logger = logging.getLogger()
    ch = logging.StreamHandler(sys.stderr)
    ch.setFormatter(logging.Formatter(const_log_format("server.py")))
    logger.addHandler(ch)
    if verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)

    main_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description="Server Service module.",
    )
    main_parser.add_argument(
        "-c",
        "--cmd",
        metavar="cmd",
        choices=["InitializeSupplyChain", "Set-up", "MPCFile", "RunningServer"],
        default="InitializeSupplyChain",
        help="Command:\n"
        + "  InitializeSupplyChain: Create main folders and generate Root Certificate\n"
        + "  Set-up: Set up required files and MAC keys - Data folder\n"
        + "  MPCFile: Creation and compiling of MPC file - Programs/SuppChainAgg folder\n"
        + "  RunningServer: Run the server for clients",
    )
    main_parser.add_argument(
        "-p",
        "--portNumber",
        type=int,
        default=4999,
        help="give port number to server",
    )
    main_parser.add_argument(
        "-rc",
        "--rootCompany",
        default="",
        help="give root company name",
    )
    main_parser.add_argument(
        "-root",
        "--rootDir",
        default="",
        help="give the directory of root company",
    )
    main_parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=const_verbose(),
        help="increase verbosity",
    )
    main_conf = main_parser.parse_args()
    sys.exit(main(main_conf))
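
# Example invocations (illustrative paths and names):
#   python server.py -c InitializeSupplyChain -root /home/lh/rootco
#   python server.py -c Set-up -rc RootCo -root /home/lh/rootco
#   python server.py -c MPCFile -root /home/lh/rootco
#   python server.py -c RunningServer -p 4999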
@ -0,0 +1,71 @@
#!/bin/bash
#############################################################################
# Copyright (c) 2019-2021 University of Exeter, UK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#############################################################################

set -e

#SM=${SCALE_MAMBA:-../SCALE-MAMBA}
TEMPLATE=template/mpc
#SM_DEPENDENCIES="Player.x Setup.x compile.py Compiler Circuits/Bristol"

print_help()
{
    echo "Usage: update-template [OPTION]"
    echo ""
    echo "Update the SCALE-MAMBA template directory (strip the *.x binaries in $TEMPLATE)."
    echo ""
    echo "  --remove, -r      remove/uninstall SCALE-MAMBA dependencies"
    echo "  --help, -h        display this help message"
}

update_sm()
{
    mkdir -p "$TEMPLATE"
    for i in "$TEMPLATE"/*.x; do
        # Skip the unexpanded glob if the directory contains no *.x binaries.
        [ -e "$i" ] || continue
        echo "Stripping $i ..."
        strip "$i"
    done
}

remove_sm()
{
    rm -r "$TEMPLATE"
}

remove="false"

while [ $# -gt 0 ]
do
    case "$1" in
        --help|-h)
            print_help
            exit 0;;

        --remove|-r)
            remove='true';;
        *)
            print_help
            exit 0;;
    esac
    shift
done

# Remove or update the template, depending on --remove.
if [ "$remove" = "true" ]; then
    remove_sm
else
    update_sm
fi

exit 0;