def test_init(self):
    class CMDTest(CMD):
        arg_nope_present = Arg("nope", default=False)
        arg_ignored = Arg("ignored")

    cmd = CMDTest(nope=True)
    self.assertTrue(getattr(cmd, "log", False))
    self.assertTrue(getattr(cmd, "nope", False))
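# Minimal standalone sketch of the pattern the test above relies on (an
# illustration, not DFFML's implementation): each Arg records an argparse-style
# name plus keyword arguments, and the CMD constructor copies keyword arguments
# onto the instance as attributes.
class SketchArg:
    def __init__(self, name, **kwargs):
        self.name = name
        self.kwargs = kwargs

class SketchCMD:
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

cmd = SketchCMD(nope=True)
assert cmd.nope is True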
"""
CLI_FORMATTER_CLASS = argparse.RawDescriptionHelpFormatter
arg_bits = Arg(
"-bits", help="Number of bits to use for key", default=4096, type=int
)
arg_key = Arg("-key", help="Path to client key file", default="client.key")
arg_cert = Arg(
"-cert", help="Path to client cert file", default="client.pem"
)
arg_csr = Arg("-csr", help="Path to client csr file", default="client.csr")
arg_server_key = Arg(
"-server-key", help="Path to server key file", default="server.key"
)
arg_server_cert = Arg(
"-server-cert", help="Path to server cert file", default="server.pem"
)
async def run(self):
    subprocess.check_call(
        [
            "openssl",
            "req",
            "-newkey",
            f"rsa:{self.bits}",
            "-keyout",
            self.key,
            "-out",
            self.csr,
            "-nodes",
            "-sha256",
            # Remaining openssl arguments are truncated in this excerpt
        ]
    )
def create_from_skel(name):
    """
    Copies samples out of skel/ and does re-naming.
    """
class CreateCMD(CMD):
    skel = Skel()
    arg_user = Arg(
        "-user",
        help=f"Your username (default: {USER})",
        default=USER,
        required=False,
    )
    arg_name = Arg(
        "-name",
        help=f"Your name (default: {NAME})",
        default=NAME,
        required=False,
    )
    arg_email = Arg(
        "-email",
        help=f"Your email (default: {EMAIL})",
        default=EMAIL,
        required=False,
    )
    arg_description = Arg(
        "-description",
        help=f"Description of python package (default: DFFML {name} {{package name}})",
        default=None,
        required=False,
    )

# Tail of a separate snippet: registering a "features" argument from inside a
# model's args() classmethod. The head of the call is not shown in the excerpt;
# the cls.config_set(...) wrapper below is an assumption based on the loop that
# follows.
cls.config_set(
    args,
    above,
    "features",
    Arg(
        nargs="+",
        required=True,
        type=Feature.load,
        action=list_action(Features),
        help="Features to train on",
    ),
)
for param in inspect.signature(cls.SCIKIT_MODEL).parameters.values():
    # TODO if param.default is an array then Args needs to get a
    # nargs="+"
    cls.config_set(
        args,
        above,
        param.name,
        Arg(
            type=cls.type_for(param),
            default=NoDefaultValue
            if param.default == inspect._empty
            else param.default,
        ),
    )
return args
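# Standalone illustration of the default-detection idiom above (plain stdlib,
# no DFFML types): parameters without a default compare equal to
# inspect.Parameter.empty, which is what the private inspect._empty aliases.
import inspect

def example_model(n_estimators=100, max_depth=None, *, verbose=False):
    pass

for param in inspect.signature(example_model).parameters.values():
    if param.default is inspect.Parameter.empty:
        print(param.name, "-> no default")
    else:
        print(param.name, "->", param.default)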
skel = Skel()
arg_user = Arg(
"-user",
help=f"Your username (default: {USER})",
default=USER,
required=False,
)
arg_name = Arg(
"-name",
help=f"Your name (default: {NAME})",
default=NAME,
required=False,
)
arg_email = Arg(
"-email",
help=f"Your email (default: {EMAIL})",
default=EMAIL,
required=False,
)
arg_description = Arg(
"-description",
help=f"Description of python package (default: DFFML {name} {{package name}})",
default=None,
required=False,
)
arg_target = Arg(
    "-target",
    help="Directory to put code in (default: same as package name)",
    default=None,
    required=False,
)

# Separate snippet: docstring and model argument of a command that attaches a
# model to the CLI.
"""
Set a model's model dir.
"""
arg_model = Arg(
    "-model", help="Model used for ML", type=Model.load, required=True
)
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self.model = self.model.withconfig(self.extra_config)
class PortCMD(CMD):
    arg_port = Arg("port", type=Port.load)
class KeysCMD(CMD):
    arg_keys = Arg(
        "-keys",
        help="Key used for source lookup and evaluation",
        nargs="+",
        required=True,
    )
parsers: Dict[str, BaseConfigLoader] = {}
for path in self.dataflows:
    _, exported = await BaseConfigLoader.load_file(
        parsers, exit_stack, path
    )
    merge(merged, exported)
# Export the dataflow
dataflow = DataFlow._fromdict(**merged)
async with self.config(BaseConfig()) as configloader:
    async with configloader() as loader:
        exported = dataflow.export(linked=not self.not_linked)
        print((await loader.dumpb(exported)).decode())
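# Simplified, standalone illustration of the merge-then-export flow above
# (assumption: exported dataflows behave like plain nested dicts; this naive
# deep merge stands in for the merge() helper used in the loop).
import json

def naive_merge(dest, src):
    for key, value in src.items():
        if isinstance(value, dict) and isinstance(dest.get(key), dict):
            naive_merge(dest[key], value)
        else:
            dest[key] = value

merged = {}
for exported in ({"operations": {"a": {}}}, {"operations": {"b": {}}}):
    naive_merge(merged, exported)
print(json.dumps(merged, indent=4))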
class Create(CMD):
    arg_operations = Arg(
        "operations", nargs="+", help="Operations to create a dataflow for"
    )
    arg_config = Arg(
        "-config",
        help="ConfigLoader to use",
        type=BaseConfigLoader.load,
        default=JSONConfigLoader,
    )
    arg_not_linked = Arg(
        "-not-linked",
        dest="not_linked",
        help="Do not export dataflows as linked",
        default=False,
        action="store_true",
    )
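# Hypothetical programmatic invocation of Create, relying on the
# kwargs-to-attributes behavior shown in the test_init snippet at the top of
# this page (the operation names and the existence of a run() coroutine on this
# command are assumptions):
#
#   import asyncio
#   asyncio.run(Create(
#       operations=["multiply", "associate_definition"],
#       config=JSONConfigLoader,
#       not_linked=False,
#   ).run())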
class CreateTLS(TLSCMD):
    """
    Create TLS certificates for server and client authentication
    """

    server = CreateTLSServer
    client = CreateTLSClient
class MultiCommCMD(CMD):
    mc_config = Arg(
        "-mc-config",
        dest="mc_config",
        default=None,
        help="MultiComm config directory",
    )
    mc_atomic = Arg(
        "-mc-atomic",
        dest="mc_atomic",
        action="store_true",
        default=False,
        help="No routes other than dataflows registered at startup",
    )
class Server(TLSCMD, MultiCommCMD, Routes):
    """
    (class docstring truncated in this excerpt)
    """

# Tail of a separate snippet: a per-key edit loop from another command's run()
# that drops into pdb so each repo can be modified interactively.
async with sources() as sctx:
    for key in self.keys:
        repo = await sctx.repo(key)
        pdb.set_trace()
        await sctx.update(repo)
class Merge(CMD):
    """
    Merge repo data between sources
    """

    arg_dest = Arg(
        "dest", help="Source to merge repos into", type=BaseSource.load_labeled
    )
    arg_src = Arg(
        "src", help="Source to pull repos from", type=BaseSource.load_labeled
    )
    async def run(self):
        async with self.src.withconfig(
            self.extra_config
        ) as src, self.dest.withconfig(self.extra_config) as dest:
            async with src() as sctx, dest() as dctx:
                async for src in sctx.repos():
                    repo = Repo(src.src_url)
                    repo.merge(src)
                    repo.merge(await dctx.repo(repo.src_url))
                    await dctx.update(repo)
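# Hypothetical CLI invocation of the Merge command above (the source labels,
# source names, and per-source flags are assumptions and depend on which
# sources are installed):
#
#   dffml merge dest=json src=csv \
#       -source-dest-filename repos.json -source-src-filename repos.csv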
class ImportExportCMD(PortCMD, SourcesCMD):
arg_simple = Arg(
"-simple",
help="Don't display input and output names",
default=False,
action="store_true",
required=False,
)
arg_display = Arg(
    "-display",
    help="How to display (TD: top down, LR: left to right, RL: right to left, BT: bottom to top)",
    default="TD",
    required=False,
)
arg_dataflow = Arg("dataflow", help="File containing exported DataFlow")
arg_config = Arg(
"-config",
help="ConfigLoader to use for importing",
type=BaseConfigLoader.load,
default=None,
)
async def run(self):
    dataflow_path = pathlib.Path(self.dataflow)
    config_cls = self.config
    if config_cls is None:
        config_type = dataflow_path.suffix.replace(".", "")
        config_cls = BaseConfigLoader.load(config_type)
    async with config_cls.withconfig(self.extra_config) as configloader:
        async with configloader() as loader:
            exported = await loader.loadb(dataflow_path.read_bytes())
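# Standalone illustration of the suffix-based loader selection in run() above:
# the file extension, minus the dot, becomes the name of the config loader to
# load (assuming a loader plugin is registered under that name).
import pathlib

for name in ("dataflow.json", "dataflow.yaml"):
    config_type = pathlib.Path(name).suffix.replace(".", "")
    print(name, "->", config_type)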