datashards tests

main
Bob Mottram 2021-06-10 20:53:26 +01:00
parent bb4afef15f
commit 189d1b8ed9
20 changed files with 1109 additions and 0 deletions

View File

@ -26,4 +26,6 @@ clean:
rm -f deploy/*~
rm -f translations/*~
rm -rf __pycache__
rm -f datashards/*~
rm -rf datashards/__pycache__
rm -f calendar.css blog.css epicyon.css follow.css login.css options.css search.css suspended.css

202
datashards/LICENSE 100644
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,5 @@
# Package metadata for the datashards bundle.
name='Datashards'
version='0.3.5'
# Expose the public submodules at package level.
from . import stores
from . import client
from . import utils

View File

@ -0,0 +1,271 @@
__filename__ = "client.py"
__author__ = "Serge Wroclawski"
__author_email__ = 'serge@wroclawski.org'
__license__ = "Apache 2.0"
__version__ = "0.1"
__maintainer__ = "Bob Mottram"
__email__ = "bob@freedombone.net"
__status__ = "Production"
from base64 import urlsafe_b64decode, urlsafe_b64encode
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
import os
import secrets
import hashlib
import datashards.usexp
# NOTE(review): original comment said the IV should be 32 bytes, but
# IV_SIZE is 16 (the AES block size) — confirm which is intended.
IV_SIZE = 16
# Shards are fixed-size 32 KiB blocks.
CHUNK_SIZE = 32768
# 13 bytes (not bits) of each shard are reserved for s-expression framing.
MAX_RAW_SIZE = CHUNK_SIZE - 13
# AES-256 symmetric key length in bytes.
KEY_SIZE = 32
BACKEND = default_backend()
def generate_key(length=KEY_SIZE):
    """Produce a cryptographically secure random key.

    Args:
        length (int): Number of random bytes to generate (default KEY_SIZE)
    Returns:
        bytes: The random key material
    """
    return secrets.token_bytes(length)
def make_iv(key, prefix, count=0):
    """Make the initialization vector for encryption/decryption.

    The IV is derived deterministically as the first IV_SIZE bytes of
    SHA-256(prefix + str(count) + key), so the same key/prefix/count
    always yields the same IV (required to decrypt CTR-mode ciphertext).

    Args:
        key (bytes): The symmetrical key
        prefix (bytes): The prefix to use (b'entry-point' or b'content')
        count (int): The counter (defaults to 0)
    Returns:
        bytes: The initialization vector in bytes
    """
    # TODO: This needs to switch to appending together bytes
    c = str(count).encode('latin-1')
    raw = prefix + c + key
    return hashlib.sha256(raw).digest()[:IV_SIZE]
def encrypt_shard_entry(data, key):
    """Encrypt an entry shard (raw entity or manifest).

    Args:
        data (bytes): The plaintext to encrypt
        key (bytes): The symmetrical key
    Returns:
        bytes: The AES-CTR ciphertext
    """
    ctr_iv = make_iv(key, b'entry-point')
    enc = Cipher(algorithms.AES(key), modes.CTR(ctr_iv),
                 backend=BACKEND).encryptor()
    return enc.update(data) + enc.finalize()
def decrypt_shard_entry(data, key):
    """Decrypt an entry shard (raw entity or manifest).

    Args:
        data (bytes): The ciphertext to decrypt
        key (bytes): The symmetrical key
    Returns:
        bytes: The decrypted plaintext
    """
    ctr_iv = make_iv(key, b'entry-point')
    dec = Cipher(algorithms.AES(key), modes.CTR(ctr_iv),
                 backend=BACKEND).decryptor()
    return dec.update(data) + dec.finalize()
def encrypt_shard_chunk(data, key, count):
    """Encrypt one content chunk of a chunked file.

    Args:
        data (bytes): The plaintext chunk
        key (bytes): The symmetrical key
        count (int): The block counter (feeds into the IV)
    Returns:
        bytes: The AES-CTR ciphertext
    """
    ctr_iv = make_iv(key, b'content', count)
    enc = Cipher(algorithms.AES(key), modes.CTR(ctr_iv),
                 backend=BACKEND).encryptor()
    return enc.update(data) + enc.finalize()
def decrypt_shard_chunk(data, key, count):
    """Decrypt one content chunk of a chunked file.

    Args:
        data (bytes): The ciphertext chunk
        key (bytes): The symmetrical key
        count (int): The block counter (feeds into the IV)
    Returns:
        bytes: The decrypted plaintext
    """
    ctr_iv = make_iv(key, b'content', count)
    dec = Cipher(algorithms.AES(key), modes.CTR(ctr_iv),
                 backend=BACKEND).decryptor()
    return dec.update(data) + dec.finalize()
def make_manifest(xts, size):
    """Create a manifest shard for a chunked file.

    Args:
        xts (list): The XT URNs of the encrypted chunks, in order
        size (int): The total (unpadded) size of the original file
    Returns:
        bytes: The raw (unencrypted) manifest as a canonical s-expression
    Raises:
        NotImplementedError: If the serialized manifest exceeds one shard
    """
    # The module is imported as 'import datashards.usexp', so the bare
    # name 'usexp' used previously was a NameError at call time.
    manifest = datashards.usexp.dumpb(["manifest", size] + xts)
    # Multi-shard manifests are not supported yet.
    if len(manifest) > MAX_RAW_SIZE:
        raise NotImplementedError("Manifest too large")
    return manifest
def pad(data, size=CHUNK_SIZE):
    """Zero-pad data up to the shard size.

    Data already `size` bytes or longer is returned unchanged (the
    negative repeat count yields an empty padding string).

    Args:
        data (bytes): The data to pad
        size (int): The target length (defaults to CHUNK_SIZE)
    Returns:
        bytes: The padded data
    """
    return data + b'\0' * (size - len(data))
def read_manifest(mlist):
    """Coerce a parsed manifest list into usable Python types.

    Args:
        mlist (list): The manifest as parsed from the s-expression,
                      with bytes elements
    Returns:
        list: ['manifest', <int size>, <str xt>, ...]
    """
    header = mlist[0].decode()
    total_size = int(mlist[1])
    chunk_urns = [entry.decode() for entry in mlist[2:]]
    return [header, total_size] + chunk_urns
def make_raw_shard(data):
    """Create a raw (single-shard) entity.

    Args:
        data (bytes): The file data
    Returns:
        bytes: The data wrapped as a Datashards 'raw' s-expression
    """
    # The module is imported as 'import datashards.usexp'; the bare name
    # 'usexp' used previously was a NameError at call time.
    return datashards.usexp.dumpb(['raw', data])
class Client():
    """A Datashards client bound to a single store."""

    def __init__(self, store):
        self.store = store

    def upload(self, fd, keyfun=generate_key):
        """Upload a file to a store.

        Args:
            fd (file-like object): The file to send
            keyfun (function): Function to generate the key (used for testing)
        Returns:
            str: The IDSC URN for the uploaded file
        Raises:
            NotImplementedError: If the store does not support the 'put' method
        """
        if not hasattr(self.store, 'put'):
            raise NotImplementedError("Store doesn't support the 'put' method")
        size = os.fstat(fd.fileno()).st_size
        key = keyfun()
        if size <= MAX_RAW_SIZE:
            # If file is smaller than max raw file size, create a "raw" entity
            data = fd.read()
            sexp = make_raw_shard(data)
            padded = pad(sexp)
            encrypted_data = encrypt_shard_entry(padded, key)
            xt_urn = self.store.put(encrypted_data)
            xt = xt_urn.split(':')[2]
            b64key = urlsafe_b64encode(key).rstrip(b'=').decode()
            return f"idsc:p0.{xt}.{b64key}"
        else:
            # Chunk the file into CHUNK_SIZE shards, then store a manifest.
            xts = []
            count = 0
            current_size = 0
            while current_size <= size:
                raw_data = fd.read(CHUNK_SIZE)
                if len(raw_data) < CHUNK_SIZE:
                    raw_data = pad(raw_data)
                data = encrypt_shard_chunk(raw_data, key, count)
                xts.append(self.store.put(data))
                count += 1
                current_size += CHUNK_SIZE
            # Finally generate the manifest
            manifest = make_manifest(xts, size)
            encrypted_manifest = encrypt_shard_entry(pad(manifest), key)
            xt_urn = self.store.put(encrypted_manifest)
            xt = xt_urn.split(':')[2]
            b64key = urlsafe_b64encode(key).rstrip(b'=').decode()
            return f"idsc:p0.{xt}.{b64key}"

    def download(self, urn, fd):
        """Download a file from a store.

        Takes a URN and writes the data to the file descriptor.

        Args:
            urn (string): The URN of the file
            fd (file-like object): A file object to write the file to
        Raises:
            NotImplementedError: If the store does not support 'get'
        """
        if not hasattr(self.store, 'get'):
            raise NotImplementedError("Store does not support 'get' method")
        scheme, payload = urn.split(':')
        if scheme != 'idsc':
            raise NotImplementedError("Client can only handle IDSCs")
        enc_suite, xt, b64key_prepad = payload.split('.')
        # Restore the base64 padding stripped at upload time.  The previous
        # "=" * (4 - len % 4) produced four '=' when the key was already
        # aligned; (-len) % 4 correctly yields zero in that case.  (It also
        # shadowed the module-level pad() function with a local 'pad'.)
        b64key = b64key_prepad + "=" * ((-len(b64key_prepad)) % 4)
        key = urlsafe_b64decode(b64key)
        xt_urn = f"urn:sha256d:{xt}"
        encrypted_data = self.store.get(xt_urn)
        decrypted_data = decrypt_shard_entry(encrypted_data, key)
        # The module is imported as datashards.usexp (no bare 'usexp' name).
        data = datashards.usexp.loadb(decrypted_data)
        if data[0] == b'raw':
            fd.write(data[1])
            fd.flush()
            return
        elif data[0] == b'manifest':
            manifest = read_manifest(data)
            size, chunks = manifest[1], manifest[2:]
            # We need to assemble the pieces
            i = 0
            current_size = 0
            for chunk in chunks:
                encrypted_data = self.store.get(chunk)
                chunk_data = decrypt_shard_chunk(encrypted_data, key, i)
                current_size += CHUNK_SIZE
                if current_size > size:
                    # Final chunk: strip the zero padding added at upload.
                    fd.write(chunk_data[:size % CHUNK_SIZE])
                else:
                    fd.write(chunk_data)
                fd.flush()
                i += 1

View File

@ -0,0 +1,5 @@
from .base import StoreError, BaseStore, GetStore, PutStore, CatalogStore, UsedStore, FreeupStore
from .memorystore import MemoryStore
from .filestore import FileStore
from .remotemagencstore import RemoteMagencStore
from .fizzgig import RemoteFizzgigStore

View File

@ -0,0 +1,177 @@
import hashlib
import base64
import random
# Shared message for abstract methods that subclasses must override.
# (Fixes the 'abastract' typo in the user-visible error string.)
notimplemented = "This method is inherited from an abstract base class"
class StoreError(Exception):
    """Raised when a store hits an unknown internal error."""
class BaseStore():
    """This is the core abstract base store that offers validation"""

    # Currently only sha256d is supported.
    # NOTE: must be a tuple — the previous ('sha256d') was just a string,
    # so 'in' did a substring match and wrongly accepted e.g. 'sha' or '2'.
    _hash_algorithms = ('sha256d',)
    _shard_size = 32768

    def validate_xt(self, xt):
        """Validate the XT

        Args:
            xt (str): The shard in XT form ``urn:<algorithm>:<hash>``
        Returns:
            tuple(str): The urn, algorithm and digest
        Raises:
            ValueError: Raised if the XT is invalid
        """
        try:
            scheme, algorithm, digest = xt.split(':')
        except ValueError:
            raise ValueError(f"XT must be in the form urn:<algorithm>:<hash>. Instead we have {xt}")
        if scheme != 'urn':
            raise ValueError("XTs must begin with 'urn'")
        if algorithm not in self._hash_algorithms:
            raise ValueError(f"Hashing algorithm {algorithm} not supported")
        return scheme, algorithm, digest

    def validate_data(self, data, sizes=(32768,)):
        """Validate data that will be stored

        Args:
            data (bytes): The data
            sizes (tuple): Permitted shard sizes in bytes
        Returns:
            bool: True if valid
        Raises:
            ValueError: Raised if the data is invalid (wrong type or size)
        """
        if not isinstance(data, bytes):
            raise ValueError("Data must of type bytes")
        if not len(data) in sizes:
            raise ValueError("Data must be of supported size")
        return True

    def sha256d_data(self, data):
        """Return the urlsafe-base64 encoded double-SHA256 of data (bytes)."""
        digest = hashlib.sha256(data).digest()
        digest2 = hashlib.sha256(digest).digest()
        return base64.urlsafe_b64encode(digest2)

    def xt_from_digest(self, digest, algorithm='sha256d'):
        """Build an XT URN string from a digest (bytes or str)."""
        if isinstance(digest, bytes):
            digest = str(digest, 'utf-8')
        return f"urn:{algorithm}:{digest}"
class GetStore():
    """This is the abstract base class for stores that have the "get" method"""

    def get(self, xt):
        """Get a shard from the store by XT

        Args:
            xt (string): ID of the shard in XT form ``urn:<algorithm>:<hash>``
        Returns:
            bytes: The requested data as a bytearray
        Raises:
            KeyError: Raised when the requested XT is not found
            ValueError: Raised when the XT is improperly formatted
            NotImplementedError: Raised if XT uses an unsupported algorithm
            StoreError: Raised if the store has an unknown internal error
        """
        # Subclasses must override; the base only documents the contract.
        raise NotImplementedError(notimplemented)
class PutStore():
    """Abstract base class for stores that have the "put" method."""

    def put(self, data):
        """Place the data in the store

        Args:
            data (bytearray): The data to store
                Currently this must be a 32k long byte array
        Returns:
            string: The URN of the data in XT form ``urn:<algorithm>:<hash>``
                If the store supports multiple hashing algorithms, it will select
                its preferred algorithm
        Raises:
            ValueError: Raised if data is of the wrong type or unsupported size
            StoreError: Raised if the store has an unknown internal error
        """
        # Subclasses must override; the base only documents the contract.
        raise NotImplementedError(notimplemented)
class DeleteStore():
    """Abstract base class for stores that can delete shards."""

    def delete(self, *shard):
        """Delete a shard from the store

        Args:
            shards: Shard(s) to delete from the store
        Raises:
            KeyError: Raised when the requested shard is not found
            ValueError: Raised when the XT is improperly formatted
            StoreError: Raised if the store has an unknown internal error
        """
        # Subclasses must override; the base only documents the contract.
        raise NotImplementedError(notimplemented)
class CatalogStore():
    """Abstract base class for stores that can list their shards."""

    def catalog(self):
        """Get a listing of all the shards in the store

        Returns:
            list (string): A list of shards in the store in XT form
        Raises:
            StoreError: Raised if the store has an unknown internal error
        """
        # Subclasses must override; the base only documents the contract.
        raise NotImplementedError(notimplemented)

    def _random_shards(self, n=1):
        """Get a selection of random shards in the store

        Args:
            n (int): Number of random shards to retrieve
        Returns:
            list (string): A list of shards in XT form
        Raises:
            StoreError: Raised if the store has an unknown internal error
        """
        # random.choices samples with replacement, so duplicates are possible.
        return random.choices(self.catalog(), k=n)
class UsedStore(BaseStore, CatalogStore):
    """Mixin reporting approximate storage consumption."""

    def used(self):
        """Get the storage used by the store in bytes

        Returns:
            int: The number of bytes used by the store
        Raises:
            StoreError: Raised if the store has an unknown internal error
        """
        # Every shard is a fixed _shard_size bytes, so usage is count * size.
        return len(self.catalog()) * self._shard_size
class FreeupStore(CatalogStore, DeleteStore):
    """Mixin adding the ability to evict random shards to free space."""

    def freeup(self, count=1):
        """Free up space in the store

        This method will free up space in the store
        and return the list of shards it has deleted

        Args:
            count (int): The number of items to delete from the store
        Returns:
            list (string): The list of deleted shards in XT form
        Raises:
            StoreError: Raised if the store has an unknown internal error
        """
        shards = random.choices(self.catalog(), k=count)
        # DeleteStore declares delete(*shard) — the previous call to
        # self.deletes(shards) was an AttributeError at runtime.
        self.delete(*shards)
        return shards

View File

@ -0,0 +1,68 @@
import os
from .base import BaseStore, GetStore, PutStore, \
FreeupStore, UsedStore, StoreError, CatalogStore
class FileStore(BaseStore, GetStore, PutStore, FreeupStore):
    """A datashards store with a file backend"""

    def __init__(self, directory=None, create_dir=False):
        """Instantiate the store

        Args:
            directory (str): The directory where the data should be stored
            create_dir (bool): Create the directory if it does not exist
        Returns:
            A new FileStore instance
        Raises:
            ValueError: If the directory is missing and create_dir is False
        """
        if not os.path.isdir(directory):
            if create_dir:
                os.mkdir(directory)
            else:
                raise ValueError(f"Store directory {directory} does not exist")
        self._dir = directory

    def __repr__(self):
        return f"file://{os.path.abspath(self._dir)}"

    def get(self, xt):
        self.__doc__ = GetStore.get.__doc__
        digest = self.validate_xt(xt)[2]
        path = os.path.join(self._dir, digest)
        if not os.path.exists(path):
            # Missing shards raise KeyError per GetStore's documented
            # contract; previously this silently returned None.
            raise KeyError("Shard not found")
        try:
            with open(path, 'rb') as fd:
                return fd.read()
        except OSError:
            raise StoreError()

    def put(self, data):
        self.__doc__ = PutStore.put.__doc__
        self.validate_data(data)
        digest = str(self.sha256d_data(data), 'utf-8')
        path = os.path.join(self._dir, digest)
        # Content-addressed: an existing file already holds identical data.
        if not os.path.exists(path):
            try:
                with open(path, 'wb') as fd:
                    fd.write(data)
            except OSError:
                raise StoreError()
        return f"urn:sha256d:{digest}"

    def catalog(self):
        self.__doc__ = CatalogStore.catalog.__doc__
        # We'll assume the store directory does not contain other files
        return [self.xt_from_digest(f) for f in os.listdir(self._dir)
                if os.path.isfile(os.path.join(self._dir, f))]

    def delete(self, xts):
        """Delete the given shards from the store.

        Args:
            xts (list): XT URNs of the shards to delete
        Returns:
            list: The digests of the deleted shards
        Raises:
            ValueError: If an XT is malformed
            StoreError: If a file could not be removed
        """
        # Previously this referenced an undefined name 'l', called
        # validate_data with subscription syntax, and pulled a docstring
        # from the unimported DeleteStore class.
        digests = [self.validate_xt(xt)[2] for xt in xts]
        for digest in digests:
            path = os.path.join(self._dir, digest)
            try:
                os.remove(path)
            except OSError:
                raise StoreError()
        return digests

View File

@ -0,0 +1,50 @@
import requests
from .base import BaseStore, GetStore, PutStore, StoreError
class RemoteFizzgigStore(BaseStore, GetStore, PutStore):
    """A remote Fizzgig store"""

    def __init__(self, url):
        """Create a RemoteFizzgigStore

        Args:
            url (string): The location of the store
        Returns:
            RemoteFizzgigStore
        """
        self.url = url

    def __repr__(self):
        return f"fizz+{self.url}"

    def get(self, xt):
        self.__doc__ = GetStore.get.__doc__
        self.validate_xt(xt)
        # Fetch the shard from the remote /get endpoint.
        # (Unreachable 'return' statements after each raise were removed.)
        r = requests.get(self.url + '/get', params={'xt': xt})
        if r.status_code == 404:
            raise KeyError("Shard not found")
        elif r.status_code == 400:
            raise ValueError(r.content.decode('utf-8'))
        elif r.status_code == 500:
            raise StoreError(r.content.decode('utf-8'))
        return r.content

    def put(self, data):
        self.__doc__ = PutStore.put.__doc__
        self.validate_data(data)
        r = requests.put(url=self.url + '/put', data=data)
        if r.status_code == 400:
            raise ValueError(r.content.decode('utf-8'))
        elif r.status_code == 500:
            raise StoreError(r.content.decode('utf-8'))
        # The server answers with JSON containing the shard's XT.
        return r.json()['xt']

View File

@ -0,0 +1,52 @@
import http.server
import socketserver
from urllib.parse import urlparse, parse_qs
from .base import BaseStore, GetStore, PutStore, StoreError
from .memorystore import MemoryStore
# Module-level store shared by every request handler instance.
memstore = MemoryStore()
class MagencStore(http.server.BaseHTTPRequestHandler):
    """HTTP request handler exposing the in-memory store over GET/POST."""

    def do_GET(self):
        """Serve a shard lookup: <path>?xt=<urn>."""
        print(f"Request received: {self.path}")
        try:
            parsed = urlparse(self.path)
            params = parse_qs(parsed.query)
            xt = params['xt'][0]
            result = memstore.get(xt)
        except KeyError:
            self.send_response(404)
            self.end_headers()
            self.wfile.write('Shard Not Found'.encode())
            return
        except ValueError as err:
            self.send_response(400)
            # end_headers() was missing here, producing a malformed response.
            self.end_headers()
            self.wfile.write(f"Malformed request: {err}".encode())
            return
        except Exception as err:
            self.send_response(500)
            self.end_headers()
            self.wfile.write(f"Server Error: {err}".encode())
            return
        self.send_response(200)
        self.send_header('Content-type', 'application/octet-stream')
        self.end_headers()
        self.wfile.write(result)

    def do_POST(self):
        """Store a shard posted in the request body."""
        # Shards are fixed-size, so read exactly one shard's worth.
        content = self.rfile.read(32768)
        try:
            xt = memstore.put(content)
        except ValueError as err:
            self.send_response(400)
            self.end_headers()
            self.wfile.write(f"Malformed request: {err}".encode())
            # Without this return the handler fell through and sent a
            # second 200 response referencing an undefined 'xt'.
            return
        except Exception as err:
            self.send_response(500)
            self.end_headers()
            self.wfile.write(f"Server Error: {err}".encode())
            return
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        self.wfile.write(xt.encode())

View File

@ -0,0 +1,43 @@
import sys
from .base import BaseStore, GetStore, PutStore, CatalogStore, DeleteStore
class MemoryStore(BaseStore, GetStore,
                  PutStore, CatalogStore):
    """BasicStore is a basic datashards store with a memory backend.

    This is more of an example than anything you'd use.
    """

    def __init__(self):
        """Create a new MemoryStore instance

        returns:
            A new `MemoryStore` object
        """
        # Maps base64 digest (str) -> shard data (bytes).
        self._store = {}

    def __repr__(self):
        return "memory://"

    def get(self, xt):
        self.__doc__ = GetStore.get.__doc__
        digest = super().validate_xt(xt)[2]
        return self._store[digest]

    def put(self, data):
        self.__doc__ = PutStore.put.__doc__
        super().validate_data(data)
        digest = super().sha256d_data(data)
        # Keys are stored as str (the unused duplicate alias was removed).
        str_digest = str(digest, 'utf-8')
        self._store[str_digest] = data
        return self.xt_from_digest(digest)

    def delete(self, *xts):
        self.__doc__ = DeleteStore.delete.__doc__
        digests = [self.validate_xt(xt)[2] for xt in xts]
        for digest in digests:
            del self._store[digest]

    def catalog(self):
        self.__doc__ = CatalogStore.catalog.__doc__
        return [self.xt_from_digest(digest) for digest in self._store.keys()]

View File

@ -0,0 +1,46 @@
import requests
from .base import BaseStore, GetStore, PutStore, StoreError
class RemoteMagencStore(BaseStore, GetStore, PutStore):
    """This is an implementation of the original Remote Magenc Store"""

    def __init__(self, url):
        """Create a RemoteMagencStore

        Args:
            url (string): The location of the store
        Returns:
            RemoteMagencStore
        """
        self.url = url

    def __repr__(self):
        return f"magenc+{self.url}"

    def get(self, xt):
        self.__doc__ = GetStore.get.__doc__
        self.validate_xt(xt)
        # (Unreachable 'return' statements after each raise were removed.)
        r = requests.get(self.url, params={'xt': xt})
        if r.status_code == 404:
            raise KeyError("Shard not found")
        elif r.status_code == 400:
            raise ValueError(r.content.decode('utf-8'))
        elif r.status_code == 500:
            raise StoreError(r.content.decode('utf-8'))
        return r.content

    def put(self, data):
        self.__doc__ = PutStore.put.__doc__
        self.validate_data(data)
        r = requests.post(url=self.url, data=data)
        if r.status_code == 400:
            raise ValueError(r.content.decode('utf-8'))
        elif r.status_code == 500:
            raise StoreError(r.content.decode('utf-8'))
        return r.text

115
datashards/usexp.py 100644
View File

@ -0,0 +1,115 @@
#!/usr/bin/env python3
"""
The Unorthodox Cannonical S-Expression Parser
"""
__version__ = "0.1"
from io import StringIO, IOBase, BytesIO
from collections import namedtuple
# A parsed value that carried a display hint, e.g. "[hint]3:abc".
TypeHinted = namedtuple('TypeHinted', 'hint data')
# Byte values recognised as length-prefix digits.
digits = (b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9')
def read_hint(fd, pos):
    """Read a type hint: the text between '[' and ']'.

    Args:
        fd (file-like): Byte stream positioned just after the '['
        pos (int): Current position in the stream (for error reporting)
    Returns:
        tuple: (hint bytes, updated position)
    Raises:
        ValueError: If the stream ends before the closing ']'
    """
    hint = b''
    while True:
        ch = fd.read(1)
        pos += 1
        if ch == b']':
            return hint, pos
        if not ch:
            # EOF before ']': previously this looped forever appending b''.
            raise ValueError(f"Unterminated hint at position {pos}")
        hint += ch
def read_list(fd, pos):
    """Read a parenthesised list of netstring-style items.

    Args:
        fd (file-like): Byte stream positioned just after the opening '('
        pos (int): Current position in the stream (for error reporting)
    Returns:
        tuple: (parsed list, updated position)
    Raises:
        ValueError: On malformed input
    """
    out = []
    read_ahead = ""
    hint = None
    while True:
        ch = fd.read(1)
        pos += 1
        if ch == b')':
            return (out, pos)
        elif ch == b'(':
            # Nested list: recurse.
            new_list, pos = read_list(fd, pos)
            out.append(new_list)
        elif ch == b'[':
            hint, pos = read_hint(fd, pos)
        elif ch == b':':
            # ':' terminates the length prefix accumulated in read_ahead.
            # (The ':' was already counted by the pos += 1 above; the extra
            # increment that used to live here skewed error positions.)
            if not read_ahead:
                raise ValueError(f"Colon but no read ahead at position {pos}")
            else:
                read_ahead = int(read_ahead)
                raw = fd.read(read_ahead)
                if hint:
                    out.append(TypeHinted(hint=hint.decode(), data=raw))
                else:
                    out.append(raw)
                pos += read_ahead
                read_ahead = ''
                hint = None
        elif ch in digits:
            read_ahead += ch.decode('ascii')
        else:
            raise ValueError(f"Unexpected {ch} at position {pos}")
def load(file):
    """Parse a canonical s-expression from a file-like object."""
    first = file.read(1)
    if first != b'(':
        raise ValueError("Expected start of file to begin with (")
    parsed, _ = read_list(file, 0)
    return parsed
def loadb(b):
    """Parse a canonical s-expression from a bytestring."""
    return load(BytesIO(b))
def dump_bytes(b):
    """Serialize bytes as a netstring: b'<length>:<data>'."""
    return str(len(b)).encode() + b':' + b
def dump_string(s):
    """Serialize a string as a netstring, counting UTF-8 bytes.

    The length prefix must be the encoded byte length; the previous
    len(s) counted characters and produced malformed output for
    non-ASCII text.
    """
    encoded = s.encode()
    return f"{len(encoded)}:".encode() + encoded
def dump_hinted(obj):
    """Serialize a TypeHinted pair as '[hint]' followed by a netstring."""
    return f"[{obj[0]}]".encode() + dump_bytes(obj[1])
def dump_number(n):
    """Serialize a number via its string representation."""
    return dump_string(str(n))
def dump_sequence(seq):
    """Serialize the items of a sequence (without outer parentheses).

    Dispatch order matters: TypeHinted is a tuple subclass, so it must
    be checked before the generic list/tuple case.
    """
    parts = []
    for obj in seq:
        if isinstance(obj, TypeHinted):
            parts.append(dump_hinted(obj))
        elif isinstance(obj, (list, tuple)):
            parts.append(b'(' + dump_sequence(obj) + b')')
        elif isinstance(obj, str):
            parts.append(dump_string(obj))
        elif isinstance(obj, bytes):
            parts.append(dump_bytes(obj))
        elif isinstance(obj, (int, float, complex)):
            parts.append(dump_number(obj))
        else:
            raise ValueError(f"Don't know how to serialize type {type(obj)}")
    return b''.join(parts)
def dumpb(seq):
    """Serialize a sequence as a complete canonical s-expression."""
    return b'(' + dump_sequence(seq) + b')'
def dump(seq, fd):
    """Serialize a sequence and write it to a file-like object.

    NOTE(review): unlike dumpb this does not emit the outer parentheses;
    preserved as-is, but confirm whether that asymmetry is intended.
    """
    out = dump_sequence(seq)
    # Bug fix: previously wrote 'seq' (the unserialized input) to fd.
    fd.write(out)

View File

@ -0,0 +1,57 @@
from urllib.parse import urlparse, urlunparse
from .stores import MemoryStore, RemoteMagencStore, FileStore, RemoteFizzgigStore
from .client import Client
def filestore(u):
    """Build a FileStore from a parsed file:// URI (only the path matters)."""
    return FileStore(u.path)
def memorystore(u):
    """Build a MemoryStore; the URI carries no configuration."""
    return MemoryStore()
def magencstore(u):
    """Build a RemoteMagencStore, dropping the 'magenc+' scheme prefix."""
    parts = list(u)
    parts[0] = parts[0][7:]  # strip the leading 'magenc+' from the scheme
    return RemoteMagencStore(urlunparse(parts))
def fizzgigstore(u):
    """Build a RemoteFizzgigStore, dropping the 'fizz+' scheme prefix."""
    parts = list(u)
    # str.lstrip('fizz+') strips a *character set*, not a prefix, and
    # could also eat leading letters of the real scheme; slice instead.
    parts[0] = parts[0][5:]
    return RemoteFizzgigStore(urlunparse(parts))
def store(uri):
    """Takes in a Datashards URI and returns the appropriate store for it

    Args:
        uri (string): The URI representation of the store
    Returns:
        Object: A datashards store
    Raises:
        ValueError: If the URI scheme is not recognised
    """
    scheme_map = {
        'file': filestore,
        'memory': memorystore,
        'magenc': magencstore,
        'fizz': fizzgigstore,
    }
    parsed = urlparse(uri)
    scheme = parsed.scheme.split('+')[0]
    factory = scheme_map.get(scheme)
    if factory is None:
        raise ValueError(f"Unsupported scheme for store {scheme}")
    return factory(parsed)
def client(uri):
    """Create a client tied to the store sent by uri

    Args:
        uri (string): The URI representation of the store
    Returns:
        Client: A datashards client
    """
    return Client(store(uri))

View File

@ -116,6 +116,7 @@ from mastoapiv1 import getNicknameFromMastoApiV1Id
from webapp_post import prepareHtmlPostNickname
from webapp_utils import markdownToHtml
from speaker import speakerReplaceLinks
import datashards
# Flags set by the federation tests once the local Alice/Bob
# test servers have started.
testServerAliceRunning = False
testServerBobRunning = False
@ -3798,9 +3799,24 @@ def testRoles() -> None:
assert not actorHasRole(actorJson, "artist")
def testDatashards() -> None:
    """Round-trip one zero-padded 32 KiB shard through a memory store."""
    print('testDatashards')
    shipper = datashards.utils.store('memory://')
    assert shipper
    # Previously the file handle was leaked; use a context manager.
    with open('README.md', 'rb') as readme:
        saveData = readme.read()
    # Zero-pad to exactly one shard (32768 bytes).
    data_size = len(saveData)
    saveData = saveData + (b'\0' * (32768 - data_size))
    assert len(saveData) == 32768
    urn = shipper.put(saveData)
    assert urn
    loadData = shipper.get(urn)
    assert loadData
def runAllTests():
print('Running tests...')
updateDefaultThemesList(os.getcwd())
testDatashards()
testFunctions()
testRoles()
testSkills()