lib.sedna.service.server

Package Contents

Classes

InferenceServer: REST API server for inference
AggregationServer
AggregationServerV2

class lib.sedna.service.server.InferenceServer(model, servername, host: str = '127.0.0.1', http_port: int = 8080, max_buffer_size: int = 104857600, workers: int = 1)[source]

Bases: lib.sedna.service.server.base.BaseServer

REST API server for inference.

start()[source]

model_info()[source]

predict(data: InferenceItem)[source]
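
A minimal usage sketch for InferenceServer follows. It assumes the package is importable as sedna.service.server (the lib. prefix in the documented path reflects the repository layout) and that model is an object exposing a predict() method; DummyModel below is purely illustrative.

    from sedna.service.server import InferenceServer

    class DummyModel:
        # Illustrative stand-in for a trained estimator; the exact interface
        # InferenceServer expects from `model` is an assumption here.
        def predict(self, data, **kwargs):
            return {"result": data}

    server = InferenceServer(
        model=DummyModel(),          # object served behind the REST endpoints
        servername="example-model",  # logical name of the served model
        host="0.0.0.0",              # listen on all interfaces
        http_port=8080,              # documented default port
    )
    server.start()                   # start the REST API server
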
class lib.sedna.service.server.AggregationServer(aggregation: str, host: str = None, http_port: int = None, exit_round: int = 1, participants_count: int = 1, ws_size: int = 10 * 1024 * 1024)[source]

Bases: lib.sedna.service.server.base.BaseServer

start()[source]

Start the server.

async client_info(request: starlette.requests.Request)[source]
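
A sketch of starting an AggregationServer for federated aggregation is given below, assuming the same sedna.service.server import path as above. The algorithm name "FedAvg", the port, and the meanings attributed to each parameter in the comments are assumptions inferred from the parameter names, not documented behaviour; only the keyword names come from the signature above.

    from sedna.service.server import AggregationServer

    server = AggregationServer(
        aggregation="FedAvg",      # aggregation algorithm name (assumed value)
        host="0.0.0.0",
        http_port=7363,            # illustrative port; no default is documented
        exit_round=5,              # rounds after which the server exits (inferred)
        participants_count=2,      # expected number of participating clients (inferred)
        ws_size=10 * 1024 * 1024,  # websocket buffer size in bytes (inferred)
    )
    server.start()                 # start the aggregation server
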
class lib.sedna.service.server.AggregationServerV2(data=None, estimator=None, aggregation=None, transmitter=None, chooser=None)[source]

start()[source]
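
AggregationServerV2 takes its collaborators as constructor arguments, all defaulting to None. The sketch below only illustrates the call shape; concrete estimator, aggregation, transmitter, and chooser objects are assumed to follow Sedna's federated-learning interfaces and are left as None here.

    from sedna.service.server import AggregationServerV2

    server = AggregationServerV2(
        data=None,         # optional dataset descriptor (assumed meaning)
        estimator=None,    # server-side model wrapper (assumed meaning)
        aggregation=None,  # aggregation algorithm object (assumed meaning)
        transmitter=None,  # transport for exchanging model weights (assumed meaning)
        chooser=None,      # client selection strategy (assumed meaning)
    )
    server.start()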