Description
PR #35 uses nginx in a Linux GitHub Actions test to provide a proxy server that does basic authentication and then passes requests along to the normal IPFS RPC API. This should be replaced with a fully in-Python solution so that future dev work can run entirely on a local machine to generate coverage reports and pass muster, rather than relying on GitHub Actions results after pushing a commit to a PR.
Work on this was started, but it had to be set aside in favor of creating a zarr v3 compatible store that uses the HAMT, so I've left the previous work here so that it can be picked up later.
The changes were all in test_zarr_ipfs.py, and centered around a pytest fixture that spun up an RPC proxy on a random localhost port and then provided that port to fixture consumers.
# Imports needed by the snippets below, beyond what test_zarr_ipfs.py already imports
import http.server
import socket
import threading
import urllib.parse

import pytest
import requests


class ProxyHandler(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        self.handle_request()

    def do_POST(self):
        self.handle_request()

    def do_PUT(self):
        self.handle_request()

    def do_DELETE(self):
        self.handle_request()

    def handle_request(self):
        parsed_url = urllib.parse.urlparse(self.path)
        path = parsed_url.path

        if path.startswith("/api/v0/"):
            # Accept any one of an API key header, a bearer token, or basic auth (test:test)
            auth_valid = False

            api_key = self.headers.get("X-API-Key")
            if api_key == "test":
                auth_valid = True

            authorization = self.headers.get("Authorization")
            # Bearer token
            if authorization == "Bearer test":
                auth_valid = True
            # Basic Auth
            if authorization == "Basic dGVzdDp0ZXN0":
                auth_valid = True

            if not auth_valid:
                self.send_response(401)
                self.send_header("Content-type", "text/plain")
                self.end_headers()
                self.wfile.write(b"Unauthorized: Invalid or missing authentication")
                return

            # Proxy to IPFS RPC API
            try:
                content_length = int(self.headers.get("Content-Length", 0))
                post_data = self.rfile.read(content_length)

                url = "http://127.0.0.1:5001" + path
                if parsed_url.query:
                    # Forward the query string too, since the RPC API takes its arguments there
                    url += "?" + parsed_url.query

                response = requests.post(
                    url,
                    files={"file": post_data},
                )
                self.send_response(response.status_code)
                for header, value in response.headers.items():
                    self.send_header(header, value)
                self.end_headers()
                self.wfile.write(response.content)
            except requests.exceptions.RequestException as e:
                self.send_response(500)
                self.send_header("Content-type", "text/plain")
                self.end_headers()
                self.wfile.write(str(e).encode())
        else:
            self.send_response(404)
            self.end_headers()
@pytest.fixture(scope="module")
def sample_rpc_proxy():
    # Find a free port
    rpc_proxy_port: int
    with socket.socket() as s:
        s.bind(("", 0))  # Bind to a free port provided by the host.
        rpc_proxy_port = s.getsockname()[1]  # Return the port number assigned.

    httpd = http.server.ThreadingHTTPServer(("localhost", rpc_proxy_port), ProxyHandler)
    thread = threading.Thread(target=httpd.serve_forever)
    thread.start()

    yield rpc_proxy_port

    # Cleanup after tests
    httpd.shutdown()
    thread.join()  # Wait for the server thread to finish
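One thing that might help whoever picks this up: the proxy's auth handling can be exercised in isolation, without going through HAMT or IPFSStore at all, by hitting the fixture's port directly with requests. A hypothetical sketch (the test name, the parametrization, and the use of /api/v0/version are illustrative only; it assumes a local IPFS daemon on 127.0.0.1:5001, just like the proxy code above does):

def test_proxy_auth(sample_rpc_proxy: int):
    # Hypothetical sanity check: the proxy should reject unauthenticated requests
    # and accept any of the three credential forms hard-coded in ProxyHandler.
    cases = [
        ({}, True),
        ({"X-API-Key": "test"}, False),
        ({"Authorization": "Bearer test"}, False),
        ({"Authorization": "Basic dGVzdDp0ZXN0"}, False),  # test:test
    ]
    url = f"http://127.0.0.1:{sample_rpc_proxy}/api/v0/version"
    for headers, expect_unauthorized in cases:
        response = requests.post(url, headers=headers)
        if expect_unauthorized:
            assert response.status_code == 401
        else:
            assert response.status_code != 401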
def test_proxy_server(sample_rpc_proxy: int):
    rpc_proxy_uri_stem = f"http://127.0.0.1:{sample_rpc_proxy}"
    ipfs_store = IPFSStore(rpc_uri_stem=rpc_proxy_uri_stem, api_key="test")
    hamt = HAMT(store=ipfs_store)
    for i in range(0, 100):
        hamt[str(i)] = i
        assert hamt[str(i)] == i
However, there were issues when testing, and we kept getting "InvalidChunkLength" errors. Future work should follow this basic approach, although the specific implementation of the web server is definitely up for change.
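One plausible (unverified) cause of the "InvalidChunkLength" errors is that handle_request copies the upstream response headers back verbatim, including hop-by-hop headers such as Transfer-Encoding, while writing response.content, which requests has already de-chunked; the client then tries to parse a plain body as a chunked one. A minimal sketch of a header-filtering helper that could replace the header-copying loop (forward_response and HOP_BY_HOP are hypothetical names, not existing code):

# Hop-by-hop headers should not be forwarded verbatim by a proxy; requests has
# already decoded the body, so advertising "Transfer-Encoding: chunked" while
# writing a plain body confuses the client's parser.
HOP_BY_HOP = {
    "connection",
    "keep-alive",
    "proxy-authenticate",
    "proxy-authorization",
    "te",
    "trailer",
    "transfer-encoding",
    "upgrade",
    "content-length",  # recomputed below from the decoded body
}


def forward_response(handler: http.server.BaseHTTPRequestHandler, response: requests.Response) -> None:
    """Write a requests.Response back out through a BaseHTTPRequestHandler,
    dropping hop-by-hop headers and recomputing Content-Length.
    (Hypothetical helper, meant to replace the header-copying loop in
    ProxyHandler.handle_request.)"""
    handler.send_response(response.status_code)
    for header, value in response.headers.items():
        if header.lower() not in HOP_BY_HOP:
            handler.send_header(header, value)
    handler.send_header("Content-Length", str(len(response.content)))
    handler.end_headers()
    handler.wfile.write(response.content)

This is only a guess at the failure mode; if it doesn't pan out, swapping http.server for a different web server, as noted above, is still on the table.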