first commit
commit f41309e21b
6 changed files with 543 additions and 0 deletions
243  .gitignore  vendored  Normal file
@@ -0,0 +1,243 @@
# ---> JetBrains
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# AWS User-specific
.idea/**/aws.xml

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# SonarLint plugin
.idea/sonarlint/

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
97  channel_helper.py  Normal file
@@ -0,0 +1,97 @@
# ############################################################################
# channel_helper.py for PDC 2025 (DO NOT EDIT!!)
# =========
# Author : Sepand KASHANI [sepand.kashani@epfl.ch]
# ############################################################################

import struct
import numpy as np
import io


def send_msg(sock, header, data):
    """
    Send a packet over the network.

    Parameters
    ----------
    sock : :py:class:`~socket.socket`
    header : bytes
        (4,) byte string.
    data : :py:class:`~numpy.ndarray`
    """
    if len(header) != 4:
        raise ValueError('Parameter[header]: expected byte() of length 4.')

    with io.BytesIO() as f:
        np.save(f, data)
        byte_data = f.getvalue()

    # Pack message length
    msg = (struct.pack('>I', len(header) + len(byte_data)) +
           header + byte_data)
    sock.sendall(msg)


def recv_msg(sock, N_byte_max=None):
    """
    Receive a packet from the network.

    Parameters
    ----------
    sock : :py:class:`~socket.socket`
    N_byte_max : int
        Maximum number of bytes to accept. (None = unlimited.)
        :py:class:`RuntimeError` raised if threshold exceeded.

    Returns
    -------
    header : bytes
        (4,) byte string
    data : :py:class:`~numpy.ndarray`
    """
    if (N_byte_max is not None):
        if not (N_byte_max > 0):
            raise TypeError('Parameter[N_byte_max] must be positive.')
    else:
        N_byte_max = np.inf

    # Extract message length
    N_msg_raw = recv_bytes(sock, 4)
    N_msg = struct.unpack('>I', N_msg_raw)[0]  # bytes

    if N_msg > N_byte_max:
        ip, port = sock.getpeername()
        s_name = f'{ip}:{port}'
        raise RuntimeError(f'{s_name} sends {N_msg:>-#9_d} bytes, but N_byte_max={N_byte_max:>-#9_d}.')

    msg = recv_bytes(sock, N_msg)
    header = msg[:4]
    with io.BytesIO(msg[4:]) as f:
        data = np.load(f)
    return header, data


def recv_bytes(sock, N_byte):
    """
    Receive bytes from the network.

    Parameters
    ----------
    sock : :py:class:`~socket.socket`
    N_byte : int
        Number of bytes to read.

    Returns
    -------
    byte_data : bytes
        (N_byte,)
    """
    packet_size = 2 ** 12

    packets, N_byte_read = [], 0
    while N_byte_read < N_byte:
        packet = sock.recv(min(packet_size, N_byte - N_byte_read))
        packets.append(packet)
        N_byte_read += len(packet)

    byte_data = b''.join(packets)
    return byte_data
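
The helpers above frame every packet as a 4-byte big-endian length (header plus payload), then the 4-byte header and the npy-serialized array. A minimal loopback sketch of that framing, assuming channel_helper.py is importable and using an arbitrary header and payload (not part of this commit):

# Loopback sanity check for the framing in channel_helper.py (illustrative only).
import socket

import numpy as np

import channel_helper as ch

sock_a, sock_b = socket.socketpair()      # connected pair of local sockets
payload = np.linspace(0.0, 1.0, num=8)    # dummy (N_sample,) float payload

ch.send_msg(sock_a, b'0XYZ', payload)     # 4-byte length | 4-byte header | npy bytes
header, data = ch.recv_msg(sock_b)

assert header == b'0XYZ'
assert np.allclose(data, payload)

sock_a.close()
sock_b.close()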
86  client.py  Normal file
@@ -0,0 +1,86 @@
# ############################################################################
# client.py for PDC 2025 (DO NOT EDIT!!)
# =========
# Original Author : Sepand KASHANI [sepand.kashani@epfl.ch]
# Current version: Adway Girish [adway.girish@epfl.ch]
# ############################################################################

"""
Black-box channel simulator. (client)

Instructions
------------
python3 client.py --input_file=[FILENAME] --output_file=[FILENAME] --srv_hostname=[HOSTNAME] --srv_port=[PORT]
"""

import argparse
import pathlib
import socket

import numpy as np

import channel_helper as ch


def parse_args():
    parser = argparse.ArgumentParser(description="COM-302 black-box channel simulator. (client)",
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog="To promote efficient communication schemes, transmissions are limited to 1 Mega-sample.")

    parser.add_argument('--input_file', type=str, required=True,
                        help='.txt file containing (N_sample,) rows of float samples.')
    parser.add_argument('--output_file', type=str, required=True,
                        help='.txt file to which channel output is saved.')
    parser.add_argument('--srv_hostname', type=str, required=True,
                        help='Server IP address.')
    parser.add_argument('--srv_port', type=int, required=True,
                        help='Server port.')

    args = parser.parse_args()

    args.input_file = pathlib.Path(args.input_file).resolve(strict=True)
    if not (args.input_file.is_file() and
            (args.input_file.suffix == '.txt')):
        raise ValueError('Parameter[input_file] is not a .txt file.')

    args.output_file = pathlib.Path(args.output_file).resolve(strict=False)
    if not (args.output_file.suffix == '.txt'):
        raise ValueError('Parameter[output_file] is not a .txt file.')

    return args


if __name__ == '__main__':
    version_cl = b'dUV'  # Always length-3 alphanumeric

    args = parse_args()
    tx_p_signal = np.loadtxt(args.input_file)

    N_sample = tx_p_signal.size
    if not ((tx_p_signal.shape == (N_sample,)) and
            np.issubdtype(tx_p_signal.dtype, np.floating)):
        raise ValueError('Parameter[input_file] must contain a real-valued sequence.')

    if N_sample > 1000000:
        raise ValueError(('Parameter[input_file] contains more than 1,000,000 samples. '
                          'Design a more efficient communication system.'))

    energy = np.sum(np.square(np.abs(tx_p_signal)))
    if energy > 2000:
        raise ValueError(('Energy of the signal exceeds the limit 2,000. '
                          'Design a more efficient communication system.'))

    with socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM) as sock_cl:
        sock_cl.connect((args.srv_hostname, args.srv_port))

        tx_header = b'0' + version_cl
        ch.send_msg(sock_cl, tx_header, tx_p_signal)

        rx_header, rx_data = ch.recv_msg(sock_cl)
        if rx_header[:1] == b'0':  # Data
            np.savetxt(args.output_file, rx_data)
        elif rx_header[:1] == b'1':  # Rate limit
            raise Exception(rx_data.tobytes())
        elif rx_header[:1] == b'2':  # Outdated version of client.py
            raise Exception(rx_data.tobytes())
        else:  # Unknown header
            err_msg = f'Unknown header: {rx_header}'
            raise Exception(err_msg)
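
Before contacting the server, client.py only checks the input locally: a real-valued 1-D sequence of at most 1,000,000 samples with total energy at most 2,000, stored one float per row. A minimal sketch for producing a compliant input file; the file name, signal, and seed are placeholders, not part of this commit:

# Build a dummy input file that passes client.py's local checks (illustrative only).
import numpy as np

x = np.random.default_rng(0).standard_normal(1000)   # hypothetical 1-D real signal
x *= 0.5 * np.sqrt(2000.0 / np.sum(x ** 2))           # keep energy well below the 2,000 limit

assert x.size <= 1_000_000
assert np.sum(np.square(np.abs(x))) <= 2000

np.savetxt('input.txt', x)   # one float per row, as read back by np.loadtxt in client.py

# Then, with placeholder server details:
#   python3 client.py --input_file=input.txt --output_file=output.txt --srv_hostname=[HOSTNAME] --srv_port=[PORT]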
1  message.txt  Normal file
@@ -0,0 +1 @@
Hello this is my message with 40 chars!
59  receiver.py  Normal file
@@ -0,0 +1,59 @@
import numpy as np

ALPHABET = (
    list('abcdefghijklmnopqrstuvwxyz') +
    list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') +
    list('0123456789') +
    [' ', '.']
)

def get_hadamard(n):
    assert (n & (n - 1) == 0), "Hadamard order must be power of 2"
    H = np.array([[1]])
    while H.shape[0] < n:
        H = np.block([
            [ H,  H],
            [ H, -H]
        ])
    return H

def decode_signal(signal, alphabet=ALPHABET):
    code_length = 64
    n_chars = len(signal) // code_length
    H = get_hadamard(64)
    scale = 1 / np.sqrt(code_length)
    codebook = H * scale

    decoded = []
    for i in range(n_chars):
        y = signal[i*code_length : (i+1)*code_length]
        # The channel may have applied sqrt(10) gain to odds or evens
        # We don't know which, so try both options and pick best
        y_even = np.array(y)
        y_even[::2] /= np.sqrt(10)
        y_odd = np.array(y)
        y_odd[1::2] /= np.sqrt(10)
        # Try decoding both hypotheses
        scores_even = codebook @ y_even
        scores_odd = codebook @ y_odd
        idx_even = np.argmax(scores_even)
        idx_odd = np.argmax(scores_odd)
        score_even = np.max(scores_even)
        score_odd = np.max(scores_odd)
        idx_best = idx_even if score_even > score_odd else idx_odd
        decoded.append(alphabet[idx_best])
    return ''.join(decoded)

def main():
    import argparse
    parser = argparse.ArgumentParser(description="Receiver: decode y.txt to recovered message for PDC Project.")
    parser.add_argument('--input_file', type=str, required=True, help="Received y.txt from channel/server")
    parser.add_argument('--output_file', type=str, required=True, help="Text file for the decoded message")
    args = parser.parse_args()
    y = np.loadtxt(args.input_file)
    decoded = decode_signal(y)
    with open(args.output_file, 'w') as f:
        f.write(decoded)

if __name__ == '__main__':
    main()
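
decode_signal relies on the rows of the scaled Hadamard matrix being orthonormal, so correlating a received block against the whole codebook peaks at the transmitted index. A standalone sketch of that property, assuming receiver.py is importable (not part of this commit):

# Orthonormality of the scaled Hadamard codebook (illustrative only).
import numpy as np

from receiver import get_hadamard

code_length = 64
codebook = get_hadamard(code_length) / np.sqrt(code_length)

# Rows are orthonormal, so codebook @ codebook.T is the identity.
assert np.allclose(codebook @ codebook.T, np.eye(code_length))

# A noiseless codeword therefore correlates most strongly with its own row.
idx_sent = 37
y = codebook[idx_sent]
assert np.argmax(codebook @ y) == idx_sent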
57  transmitter.py  Normal file
@@ -0,0 +1,57 @@
import numpy as np
import sys

# Character Set and coding
ALPHABET = (
    list('abcdefghijklmnopqrstuvwxyz') +
    list('ABCDEFGHIJKLMNOPQRSTUVWXYZ') +
    list('0123456789') +
    [' ', '.']
)

def get_hadamard(n):
    assert (n & (n - 1) == 0), "Hadamard order must be power of 2"
    H = np.array([[1]])
    while H.shape[0] < n:
        H = np.block([
            [ H,  H],
            [ H, -H]
        ])
    return H

def encode_message(msg, alphabet=ALPHABET):
    msg = msg.strip()
    if len(msg) != 40:
        raise Exception("Message must be exactly 40 characters!")
    # Get Hadamard codes
    H = get_hadamard(64)
    code_length = 64
    # Normalize so signal energy stays bounded
    # Each row has norm sqrt(64) = 8, so scale down by 1/8
    scale = 1 / np.sqrt(code_length)
    signals = []
    for c in msg:
        idx = alphabet.index(c)
        signals.append(H[idx] * scale)
    signal = np.concatenate(signals)
    # Energy check (should be << 2000)
    assert signal.shape[0] == 2560
    energy = np.sum(signal ** 2)
    if energy > 2000:
        raise Exception("Signal energy above allowed!")
    return signal

def main():
    import argparse
    parser = argparse.ArgumentParser(description="Message to signal encoder for PDC Project")
    parser.add_argument('--message_file', type=str, required=True, help="Text file with exactly 40 chars.")
    parser.add_argument('--output_file', type=str, required=True, help="Output signal file for client.py.")
    args = parser.parse_args()

    with open(args.message_file, 'r') as f:
        msg = f.read().strip()
    x = encode_message(msg)
    np.savetxt(args.output_file, x)

if __name__ == '__main__':
    main()
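
The two scripts can be exercised together without the server by pushing the encoded signal through a toy channel. The sqrt(10) gain on one parity follows the comment in receiver.py; the additive noise level below is an assumption, since the real channel is a black box (not part of this commit):

# Offline round trip: encode -> toy channel -> decode (illustrative only).
# The even/odd sqrt(10) gain mirrors the hypothesis handled in receiver.py;
# the noise standard deviation is a guess, not the real channel model.
import numpy as np

from receiver import decode_signal
from transmitter import encode_message

msg = 'The quick brown fox jumps over lazy dog.'   # exactly 40 characters from ALPHABET
x = encode_message(msg)

rng = np.random.default_rng(0)
y = x.copy()
y[::2] *= np.sqrt(10)                     # boost one parity (even here; unknown in practice)
y += 0.05 * rng.standard_normal(y.size)   # assumed noise level

print(decode_signal(y))                   # should print msg back for moderate noise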