Skip to content

Projects and stacks

Paasify Project

paasify.projects

Project library

This handles the project entity.

This library provides two classes:

  • PaasifyProjectConfig: A project config
  • PaasifyProject: A project instance

Example:

test.py
from paasify.projects import PaasifyProject

prj = PaasifyProject.discover_project()
prj.dump()

PaasifyConfigExtraVars

Bases: NodeList, PaasifyObj

Paasify Project Configuration

Source code in paasify/projects.py
class PaasifyConfigExtraVars(NodeList, PaasifyObj):
    """Project `extra_vars` configuration.

    Normalizes the `extra_vars` setting into a list of variable files to
    load: accepts a list of paths, a single path string, or null/empty to
    disable extra vars loading.
    """

    conf_default = []

    conf_schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "Project extra vars files",
        "description": "Extra vars file to load for this project",
        "oneOf": [
            {
                "title": "List of file",
                "description": "List of files to load",
                "type": "array",
                "items": {
                    "type": "string",
                },
            },
            {
                "title": "String",
                "description": "A single file to load",
                "type": "string",
            },
            {
                "title": "No configuration",
                "description": "Disable extra_vars loading",
                "type": "null",
            },
        ],
        "examples": [
            {
                # Fixed: the original list was missing commas between the
                # strings, so Python silently concatenated the three paths
                # into one bogus value.
                "extra_vars": [
                    "../common_vars.yml",
                    "common/common_vars.yml",
                    "/absolute/path/common_vars.yml",
                ],
            },
            {
                "extra_vars": "single/file/to_load.yml",
            },
            {
                "extra_vars": None,
            },
        ],
    }

    def node_hook_transform(self, payload):
        """Coerce the payload into a list of file paths.

        None/empty becomes [], a single string becomes a one-element list,
        and a list is passed through unchanged.
        """

        if not payload:
            payload = []
        elif isinstance(payload, str):
            payload = [payload]
        return payload
node_hook_transform(payload)

Init PaasifyRuntime

Source code in paasify/projects.py
def node_hook_transform(self, payload):
    """Normalize the extra_vars payload into a list of file paths."""

    # Empty/None disables extra vars entirely; a bare string is treated
    # as a single file entry; anything else is returned untouched.
    if not payload:
        return []
    return [payload] if isinstance(payload, str) else payload

PaasifyProject

Bases: NodeMap, PaasifyObj

Paasify Project instance

Source code in paasify/projects.py
class PaasifyProject(NodeMap, PaasifyObj):
    """Paasify Project instance.

    Root node of a paasify project: exposes the project `config`,
    `sources` and `stacks` as child nodes, and builds a
    PaasifyProjectRuntime from the `_runtime` payload key.
    """

    # Defaults applied when keys are missing from the payload
    conf_default = {
        "_runtime": {},
        "config": {},
        "sources": [],
        "stacks": [],
    }

    # Child node classes instantiated from the matching payload keys
    conf_children = [
        {
            "key": "config",
            "cls": PaasifyProjectConfig,
        },
        {
            "key": "sources",
            "cls": SourcesManager,
        },
        {
            "key": "stacks",
            "cls": StackManager,
        },
    ]

    # JSON schema describing the top-level `paasify.yml` format
    conf_schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "title": "Paasify",
        "description": "Main paasify project settings. This defines the format of `paasify.yml`.",
        "additionalProperties": ALLOW_CONF_JUNK,
        "examples": [
            {
                "config": {
                    "tags_prefix": ["_paasify"],
                    "vars": {
                        "app_domain": "devbox.192.168.186.129.nip.io",
                        "app_expose_ip": "192.168.186.129",
                        "app_tz": "Europe/Paris",
                        "top_var1": "My value",
                        "top_var2": "TOP VAR1=> ${top_var1}",
                    },
                },
                "sources": [
                    {"default": {"url": "https://github.com/user/docker-compose.git"}}
                ],
                "stacks": [
                    {
                        "app": "default:traefik",
                        "path": "traefik",
                        "tags": [
                            "ep_http",
                            "expose_admin",
                            "debug",
                            {
                                "traefik-svc": {
                                    "traefik_net_external": False,
                                    "traefik_svc_port": "8080",
                                }
                            },
                        ],
                    },
                    {
                        "app": "default:minio",
                        "env": [
                            {"app_admin_passwd": "MY_PASS"},
                            {"app_image": "quay.io/minio/minio:latest"},
                        ],
                        "tags": [
                            {
                                "traefik-svc": {
                                    "traefik_svc_name": "minio-api",
                                    "traefik_svc_port": 9000,
                                }
                            },
                            {
                                "traefik-svc": {
                                    "traefik_svc_name": "minio-console",
                                    "traefik_svc_port": 9001,
                                }
                            },
                        ],
                    },
                    {"app": "default:authelia", "tags": ["traefik-svc"]},
                    {"app": "default:librespeed", "tags": ["traefik-svc"]},
                ],
            },
        ],
        "default": {},
        "properties": {
            "config": {
                "type": "object",
                "description": "See: schema prj_config",
            },
            "sources": {
                "type": "object",
                "description": "See: schema prj_sources",
            },
            "stacks": {
                "type": "array",
                "description": "See: schema prj_stacks",
            },
            "_runtime": {
                "title": "Project runtime variables",
                "type": "object",
                "description": "Internal object to pass context variables, internal use only or for troubleshooting purpose",
            },
        },
    }

    ident = "main"  # Node identifier of the project root
    engine_cls = None  # Resolved container engine class, set lazily below
    runtime = None  # PaasifyProjectRuntime instance, set in node_hook_transform

    def node_hook_transform(self, payload):
        """Initialize the project from its configuration payload.

        Builds the runtime child, optionally merges the on-disk project
        file into the payload, and resolves the engine class.
        """

        # Create runtime instance child.
        # When no "_runtime" key is present (or it is empty/falsy), the
        # whole payload is used as the runtime configuration.
        _runtime = payload.get("_runtime") or payload
        self.runtime = PaasifyProjectRuntime(
            parent=self, payload=_runtime, ident="ProjectRuntime"
        )

        # Inject payload: unless file loading is explicitly disabled, read
        # the project config file and let its values override the payload.
        if self.runtime.load_file is not False:
            self.log.info(f"Load project file: {self.runtime.config_file_path}")
            _payload = anyconfig.load(self.runtime.config_file_path)
            payload.update(_payload)

        # Create engine: resolve the backend once; a None name lets
        # EngineDetect choose automatically.
        if not self.engine_cls:
            engine_name = self.runtime.engine or None
            self.engine_cls = EngineDetect().detect(engine=engine_name)

        return payload

    def node_hook_final(self):
        "Report for logging"
        ns = self.runtime.namespace
        self.log.info(f"Project '{ns}' loaded")
node_hook_final()

Report for logging

Source code in paasify/projects.py
def node_hook_final(self):
    """Emit a log line confirming the project finished loading."""
    self.log.info(f"Project '{self.runtime.namespace}' loaded")
node_hook_transform(payload)

Init configuration Project

Source code in paasify/projects.py
def node_hook_transform(self, payload):
    """Initialize the project from its configuration payload.

    Builds the PaasifyProjectRuntime child from the `_runtime` key (or,
    when absent, from the whole payload), optionally merges the on-disk
    project file into the payload, then resolves the engine class.
    """

    # Create runtime instance child.
    # When no "_runtime" key is present (or it is empty/falsy), the whole
    # payload is used as the runtime configuration.
    _runtime = payload.get("_runtime") or payload
    self.runtime = PaasifyProjectRuntime(
        parent=self, payload=_runtime, ident="ProjectRuntime"
    )

    # Inject payload: unless file loading is explicitly disabled, read the
    # project config file and let its values override the in-memory payload.
    if self.runtime.load_file is not False:
        self.log.info(f"Load project file: {self.runtime.config_file_path}")
        _payload = anyconfig.load(self.runtime.config_file_path)
        payload.update(_payload)

    # Create engine: resolve the container backend once; a None engine name
    # lets EngineDetect choose automatically.
    if not self.engine_cls:
        engine_name = self.runtime.engine or None
        self.engine_cls = EngineDetect().detect(engine=engine_name)

    return payload

PaasifyProjectConfig

Bases: NodeMap, PaasifyObj

Paasify Project Configuration

Source code in paasify/projects.py
class PaasifyProjectConfig(NodeMap, PaasifyObj):
    """Paasify Project Configuration.

    Holds project-wide settings: namespace, shared vars, extra vars files
    and the default tags applied to every stack.
    """

    # Defaults for every supported configuration key
    conf_default = {
        "namespace": None,
        "vars": {},
        "tags": [],
        "tags_suffix": [],
        "tags_prefix": [],
        "extra_vars": [],
    }

    # Child node classes instantiated from the matching payload keys;
    # tag keys are currently kept as plain values (see commented entries).
    conf_children = [
        {
            "key": "namespace",
        },
        {
            "key": "vars",
            "cls": PaasifyConfigVars,
        },
        {
            "key": "extra_vars",
            "cls": PaasifyConfigExtraVars,
        },
        # {
        #     "key": "tags",
        #     "cls": list,
        #     #"cls": StackTagMgr,
        # },
        # {
        #     "key": "tags_prefix",
        #     "cls": list,
        #     #"cls": StackTagMgr,
        # },
        # {
        #     "key": "tags_suffix",
        #     "cls": list,
        #    # "cls": StackTagMgr,
        # },
    ]

    # JSON schema: the project config is either an object or null
    # (null means automatic configuration, overridable via env vars).
    conf_schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "Paasify Project settings",
        "description": (
            "Configure main project settings. It provides global settings"
            " but also defaults vars and tags for all stacks."
        ),
        "oneOf": [
            {
                "type": "object",
                "additionalProperties": ALLOW_CONF_JUNK,
                "title": "Project configuration",
                "description": (
                    "Configure project as a dict value. "
                    "Most of these settings are overridable via environment vars."
                ),
                "default": {},
                "properties": {
                    "namespace": {
                        "title": "Project namespace",
                        "description": (
                            "Name of the project namespace. If not"
                            " set, defaulted to directory name"
                        ),
                        "oneOf": [
                            {
                                "title": "None",
                                "description": "Defaulted by the project dir name",
                                "type": "null",
                            },
                            {
                                "title": "String",
                                "description": "Custom namespace name string",
                                "type": "string",
                            },
                        ],
                    },
                    # Sub-schemas are delegated to the owning classes
                    "extra_vars": PaasifyConfigExtraVars.conf_schema,
                    "vars": PaasifyConfigVars.conf_schema,
                    "tags": StackTagMgr.conf_schema,
                    "tags_suffix": StackTagMgr.conf_schema,
                    "tags_prefix": StackTagMgr.conf_schema,
                },
                "examples": [
                    {
                        "config": {
                            "namespace": "my_ns1",
                            "vars": [{"my_var1": "my_value1"}],
                            "tags": ["tag1", "tag2"],
                        },
                    }
                ],
            },
            {
                "type": "null",
                "title": "Empty",
                "description": "Use automatic conf if not set. You can still override conf values with environment vars.",
                "default": None,
                "examples": [
                    {
                        "config": None,
                    },
                    {
                        "config": {},
                    },
                ],
            },
        ],
    }

PaasifyProjectRuntime

Bases: NodeMap, PaasifyObj

Paasify Runtime Object (deprecated)

Source code in paasify/projects.py
class PaasifyProjectRuntime(NodeMap, PaasifyObj):
    """Paasify Runtime Object (deprecated).

    Resolves the project execution context: project root, config file,
    engine name and internal paasify directories.
    """

    conf_schema = {
        # TODO: "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "title": "Paasify Project settings",
        "additionalProperties": False,
        "properties": {
            "default_source": {
                "title": "",
                "description": "",
                "type": "string",
            },
            "cwd": {
                "title": "",
                "description": "",
                "type": "string",
            },
            "working_dir": {
                "title": "",
                "description": "",
                "oneOf": [
                    {"type": "string"},
                    {"type": "null"},
                ],
            },
            "engine": {
                "title": "Docker backend engine",
                "oneOf": [
                    {
                        "description": "Docker engine",
                        "type": "string",
                    },
                    {
                        "description": "Automatic",
                        "type": "null",
                    },
                ],
            },
            "filenames": {
                "oneOf": [
                    {
                        "title": "List of file to lookup",
                        "description": "List of string file names to lookup paasify.yaml files",
                        "type": "array",
                        "items": {
                            "type": "string",
                        },
                    },
                ],
            },
        },
    }

    # Runtime defaults; note `cwd` is captured at class-creation time,
    # not per instance.
    conf_default = {
        "load_file": None,
        "root_hint": None,
        "default_source": "default",
        "cwd": os.getcwd(),
        "working_dir": None,
        "engine": None,
        "filenames": ["paasify.yml", "paasify.yaml"],
        "relative": None,
        "dump_payload_log": False,
        "no_tty": False,
    }

    def node_hook_transform(self, payload):
        """Initialize the PaasifyRuntime payload.

        Accepts either a dict payload or a plain string (treated as the
        project root hint), merges defaults, resolves the project
        directory context and derives the internal paasify directories.
        """

        # Allow config as string !
        if isinstance(payload, str):
            root_hint = payload
            payload = {
                "root_hint": root_hint,
                "load_file": True,
            }

        # Create default config
        result = {}
        result = dict(self.conf_default)
        result.update(payload)

        # The payload is a dir or a config file
        root_hint = result.get("root_hint")
        filenames = result.get("filenames")
        _payload1 = self.get_ctx(root_hint, config_files=filenames)
        result.update(_payload1)

        # Build default runtime from root path
        root_path = result["root_path"]

        paasify_source_dir = get_paasify_pkg_dir()
        paasify_plugins_dir = os.path.join(paasify_source_dir, "assets", "plugins")
        private_dir = os.path.join(root_path, ".paasify")
        collection_dir = os.path.join(private_dir, "_collections")
        jsonnet_dir = os.path.join(private_dir, "plugins")

        # Env overrides: the collection dir can be relocated via env var
        if PAASIFY_ENV_COLLECTION_DIR in os.environ:
            collection_dir = os.environ[PAASIFY_ENV_COLLECTION_DIR]
            self.log.info(
                f"Default collection directory updated from environment: {PAASIFY_ENV_COLLECTION_DIR}={collection_dir}"
            )

        _payload2 = {
            "paasify_source_dir": paasify_source_dir,
            "paasify_plugins_dir": paasify_plugins_dir,
            "project_private_dir": private_dir,
            "project_collection_dir": collection_dir,
            "project_jsonnet_dir": jsonnet_dir,
        }
        result.update(_payload2)

        # Allow user to override parts: re-apply the raw payload last so
        # explicit user values win over everything derived above.
        result.update(payload)
        return result

    @classmethod
    def get_project_path2(cls, path, filenames=None):
        "Find the closest paasify config file"

        # if not path.startswith('/'):

        # NOTE(review): `cls.filenames` is not visibly defined as a class
        # attribute here (only as a `conf_default` key) — presumably
        # injected by the Node machinery; confirm, otherwise this raises
        # AttributeError whenever `filenames` is falsy.
        filenames = filenames or cls.filenames
        # filenames = self._node_root.config.filenames

        paths = list_parent_dirs(path)
        result = find_file_up(filenames, paths)

        return result

    @staticmethod
    def get_ctx(project_hint, config_files=None, cwd=None, relative=None):
        """Return the directory context for a project hint.

        `project_hint` may be a directory, a config file path, or None
        (auto-discovery upward from `cwd`). Returns a dict with namespace,
        root_path, config file info, relative flag, cwd and sub_dir.

        Raises error.ProjectNotFound when no project can be located.
        """

        config_files = config_files or ["paasify.yml", "paasify.yaml"]
        cwd = cwd or os.getcwd()

        # Autofind config file in parents if None
        if project_hint is None:
            # Show relative by default when project_hint is None
            relative = True if relative is None else relative

            try:
                project_hint = PaasifyProjectRuntime.get_project_path2(
                    cwd, filenames=config_files
                )[0]
            except IndexError as err:
                config_files = "' or '".join(config_files)
                msg = f"Impossible to find any '{config_files}' in '{cwd}', or in above directories."
                raise error.ProjectNotFound(msg) from err

        # Check the project root:
        if os.path.isdir(project_hint):
            root_path = project_hint
            # TODO: Lookup for candidates instead taking the first
            config_file_name = config_files[0]

        elif os.path.isfile(project_hint):
            root_path = os.path.dirname(project_hint)
            config_file_name = os.path.basename(project_hint)
        else:
            msg = f"Impossible to find paasify project in: {project_hint}"
            raise error.ProjectNotFound(msg)

        # NOTE(review): fails for a config file given as a bare relative
        # name in cwd (dirname == "") — confirm callers never do that.
        assert root_path

        # Get more context: default `relative` from the hint's own form
        if relative is None:
            relative = not os.path.isabs(project_hint)
        project_rel = os.path.relpath(root_path, start=cwd)
        project_abs = os.path.abspath(root_path)

        # Check if cwd is inside the project tree
        sub_dir = None
        if project_abs != cwd and project_abs in cwd:
            sub_dir = cwd.replace(project_abs, "").strip("/")
        # elif project_abs != cwd:
        #     relative = False

        # Convert root_path to the requested form
        root_path = project_rel if relative else project_abs

        result = {
            "namespace": os.path.basename(project_abs),
            "root_path": root_path,
            "config_file": config_file_name,
            "config_file_path": os.path.join(root_path, config_file_name),
            "relative": relative,
            "cwd": cwd,
            "sub_dir": sub_dir,
        }
        return result
get_ctx(project_hint, config_files=None, cwd=None, relative=None) staticmethod

Return a list of directory context

Source code in paasify/projects.py
@staticmethod
def get_ctx(project_hint, config_files=None, cwd=None, relative=None):
    """Resolve the directory context for a project hint.

    The hint may be a directory, a config file path, or None to trigger
    upward auto-discovery from `cwd`. Returns a dict describing the
    project root, config file, namespace and relative/sub-dir status.
    """

    config_files = config_files or ["paasify.yml", "paasify.yaml"]
    cwd = cwd or os.getcwd()

    # No hint: walk up from cwd looking for a known config file name
    if project_hint is None:
        # Default to relative paths when auto-discovering
        if relative is None:
            relative = True

        try:
            candidates = PaasifyProjectRuntime.get_project_path2(
                cwd, filenames=config_files
            )
            project_hint = candidates[0]
        except IndexError as err:
            joined = "' or '".join(config_files)
            msg = f"Impossible to find any '{joined}' in '{cwd}', or in above directories."
            raise error.ProjectNotFound(msg) from err

    # Resolve root dir and config file name from the hint
    if os.path.isdir(project_hint):
        # TODO: Lookup for candidates instead taking the first
        root_path, config_file_name = project_hint, config_files[0]
    elif os.path.isfile(project_hint):
        root_path, config_file_name = os.path.split(project_hint)
    else:
        msg = f"Impossible to find paasify project in: {project_hint}"
        raise error.ProjectNotFound(msg)

    assert root_path

    # Derive relative flag from the hint's own form when unset
    if relative is None:
        relative = not os.path.isabs(project_hint)
    project_abs = os.path.abspath(root_path)
    project_rel = os.path.relpath(root_path, start=cwd)

    # Detect when cwd sits inside the project tree
    sub_dir = None
    if project_abs != cwd and project_abs in cwd:
        sub_dir = cwd.replace(project_abs, "").strip("/")

    # Emit root_path in the requested form
    root_path = project_rel if relative else project_abs

    return {
        "namespace": os.path.basename(project_abs),
        "root_path": root_path,
        "config_file": config_file_name,
        "config_file_path": os.path.join(root_path, config_file_name),
        "relative": relative,
        "cwd": cwd,
        "sub_dir": sub_dir,
    }
get_project_path2(path, filenames=None) classmethod

Find the closest paasify config file

Source code in paasify/projects.py
@classmethod
def get_project_path2(cls, path, filenames=None):
    """Find the closest paasify config file above `path`.

    Walks the parent directories of `path` and returns the matches of
    `filenames` located by `find_file_up`.
    """

    # if not path.startswith('/'):

    # NOTE(review): `cls.filenames` is not visibly defined as a class
    # attribute in this file (only as a `conf_default` key) — presumably
    # injected by the Node machinery; confirm, otherwise this line raises
    # AttributeError whenever `filenames` is falsy.
    filenames = filenames or cls.filenames
    # filenames = self._node_root.config.filenames

    paths = list_parent_dirs(path)
    result = find_file_up(filenames, paths)

    return result
node_hook_transform(payload)

Init PaasifyRuntime

Source code in paasify/projects.py
def node_hook_transform(self, payload):
    """Initialize the PaasifyRuntime payload.

    Accepts either a dict payload or a plain string (treated as the
    project root hint), merges defaults, resolves the project directory
    context and derives the internal paasify directories from it.
    """

    # Allow config as string !
    if isinstance(payload, str):
        root_hint = payload
        payload = {
            "root_hint": root_hint,
            "load_file": True,
        }

    # Create default config
    result = {}
    result = dict(self.conf_default)
    result.update(payload)

    # The payload is a dir or a config file
    root_hint = result.get("root_hint")
    filenames = result.get("filenames")
    _payload1 = self.get_ctx(root_hint, config_files=filenames)
    result.update(_payload1)

    # Build default runtime from root path
    root_path = result["root_path"]

    paasify_source_dir = get_paasify_pkg_dir()
    paasify_plugins_dir = os.path.join(paasify_source_dir, "assets", "plugins")
    private_dir = os.path.join(root_path, ".paasify")
    collection_dir = os.path.join(private_dir, "_collections")
    jsonnet_dir = os.path.join(private_dir, "plugins")

    # Env overrides: the collection directory can be relocated via env var
    if PAASIFY_ENV_COLLECTION_DIR in os.environ:
        collection_dir = os.environ[PAASIFY_ENV_COLLECTION_DIR]
        self.log.info(
            f"Default collection directory updated from environment: {PAASIFY_ENV_COLLECTION_DIR}={collection_dir}"
        )

    _payload2 = {
        "paasify_source_dir": paasify_source_dir,
        "paasify_plugins_dir": paasify_plugins_dir,
        "project_private_dir": private_dir,
        "project_collection_dir": collection_dir,
        "project_jsonnet_dir": jsonnet_dir,
    }
    result.update(_payload2)

    # Allow user to override parts: re-apply the raw payload last so
    # explicit user values win over everything derived above.
    result.update(payload)
    return result

Paasify Stacks

paasify.stacks

Paasify Stack management

This library provides two classes:

  • StackManager: Manage a list of stacks
  • Stack: A stack instance

Stack

Bases: NodeMap, PaasifyObj

Paasify Stack Instance

Source code in paasify/stacks.py
  60
  61
  62
  63
  64
  65
  66
  67
  68
  69
  70
  71
  72
  73
  74
  75
  76
  77
  78
  79
  80
  81
  82
  83
  84
  85
  86
  87
  88
  89
  90
  91
  92
  93
  94
  95
  96
  97
  98
  99
 100
 101
 102
 103
 104
 105
 106
 107
 108
 109
 110
 111
 112
 113
 114
 115
 116
 117
 118
 119
 120
 121
 122
 123
 124
 125
 126
 127
 128
 129
 130
 131
 132
 133
 134
 135
 136
 137
 138
 139
 140
 141
 142
 143
 144
 145
 146
 147
 148
 149
 150
 151
 152
 153
 154
 155
 156
 157
 158
 159
 160
 161
 162
 163
 164
 165
 166
 167
 168
 169
 170
 171
 172
 173
 174
 175
 176
 177
 178
 179
 180
 181
 182
 183
 184
 185
 186
 187
 188
 189
 190
 191
 192
 193
 194
 195
 196
 197
 198
 199
 200
 201
 202
 203
 204
 205
 206
 207
 208
 209
 210
 211
 212
 213
 214
 215
 216
 217
 218
 219
 220
 221
 222
 223
 224
 225
 226
 227
 228
 229
 230
 231
 232
 233
 234
 235
 236
 237
 238
 239
 240
 241
 242
 243
 244
 245
 246
 247
 248
 249
 250
 251
 252
 253
 254
 255
 256
 257
 258
 259
 260
 261
 262
 263
 264
 265
 266
 267
 268
 269
 270
 271
 272
 273
 274
 275
 276
 277
 278
 279
 280
 281
 282
 283
 284
 285
 286
 287
 288
 289
 290
 291
 292
 293
 294
 295
 296
 297
 298
 299
 300
 301
 302
 303
 304
 305
 306
 307
 308
 309
 310
 311
 312
 313
 314
 315
 316
 317
 318
 319
 320
 321
 322
 323
 324
 325
 326
 327
 328
 329
 330
 331
 332
 333
 334
 335
 336
 337
 338
 339
 340
 341
 342
 343
 344
 345
 346
 347
 348
 349
 350
 351
 352
 353
 354
 355
 356
 357
 358
 359
 360
 361
 362
 363
 364
 365
 366
 367
 368
 369
 370
 371
 372
 373
 374
 375
 376
 377
 378
 379
 380
 381
 382
 383
 384
 385
 386
 387
 388
 389
 390
 391
 392
 393
 394
 395
 396
 397
 398
 399
 400
 401
 402
 403
 404
 405
 406
 407
 408
 409
 410
 411
 412
 413
 414
 415
 416
 417
 418
 419
 420
 421
 422
 423
 424
 425
 426
 427
 428
 429
 430
 431
 432
 433
 434
 435
 436
 437
 438
 439
 440
 441
 442
 443
 444
 445
 446
 447
 448
 449
 450
 451
 452
 453
 454
 455
 456
 457
 458
 459
 460
 461
 462
 463
 464
 465
 466
 467
 468
 469
 470
 471
 472
 473
 474
 475
 476
 477
 478
 479
 480
 481
 482
 483
 484
 485
 486
 487
 488
 489
 490
 491
 492
 493
 494
 495
 496
 497
 498
 499
 500
 501
 502
 503
 504
 505
 506
 507
 508
 509
 510
 511
 512
 513
 514
 515
 516
 517
 518
 519
 520
 521
 522
 523
 524
 525
 526
 527
 528
 529
 530
 531
 532
 533
 534
 535
 536
 537
 538
 539
 540
 541
 542
 543
 544
 545
 546
 547
 548
 549
 550
 551
 552
 553
 554
 555
 556
 557
 558
 559
 560
 561
 562
 563
 564
 565
 566
 567
 568
 569
 570
 571
 572
 573
 574
 575
 576
 577
 578
 579
 580
 581
 582
 583
 584
 585
 586
 587
 588
 589
 590
 591
 592
 593
 594
 595
 596
 597
 598
 599
 600
 601
 602
 603
 604
 605
 606
 607
 608
 609
 610
 611
 612
 613
 614
 615
 616
 617
 618
 619
 620
 621
 622
 623
 624
 625
 626
 627
 628
 629
 630
 631
 632
 633
 634
 635
 636
 637
 638
 639
 640
 641
 642
 643
 644
 645
 646
 647
 648
 649
 650
 651
 652
 653
 654
 655
 656
 657
 658
 659
 660
 661
 662
 663
 664
 665
 666
 667
 668
 669
 670
 671
 672
 673
 674
 675
 676
 677
 678
 679
 680
 681
 682
 683
 684
 685
 686
 687
 688
 689
 690
 691
 692
 693
 694
 695
 696
 697
 698
 699
 700
 701
 702
 703
 704
 705
 706
 707
 708
 709
 710
 711
 712
 713
 714
 715
 716
 717
 718
 719
 720
 721
 722
 723
 724
 725
 726
 727
 728
 729
 730
 731
 732
 733
 734
 735
 736
 737
 738
 739
 740
 741
 742
 743
 744
 745
 746
 747
 748
 749
 750
 751
 752
 753
 754
 755
 756
 757
 758
 759
 760
 761
 762
 763
 764
 765
 766
 767
 768
 769
 770
 771
 772
 773
 774
 775
 776
 777
 778
 779
 780
 781
 782
 783
 784
 785
 786
 787
 788
 789
 790
 791
 792
 793
 794
 795
 796
 797
 798
 799
 800
 801
 802
 803
 804
 805
 806
 807
 808
 809
 810
 811
 812
 813
 814
 815
 816
 817
 818
 819
 820
 821
 822
 823
 824
 825
 826
 827
 828
 829
 830
 831
 832
 833
 834
 835
 836
 837
 838
 839
 840
 841
 842
 843
 844
 845
 846
 847
 848
 849
 850
 851
 852
 853
 854
 855
 856
 857
 858
 859
 860
 861
 862
 863
 864
 865
 866
 867
 868
 869
 870
 871
 872
 873
 874
 875
 876
 877
 878
 879
 880
 881
 882
 883
 884
 885
 886
 887
 888
 889
 890
 891
 892
 893
 894
 895
 896
 897
 898
 899
 900
 901
 902
 903
 904
 905
 906
 907
 908
 909
 910
 911
 912
 913
 914
 915
 916
 917
 918
 919
 920
 921
 922
 923
 924
 925
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
class Stack(NodeMap, PaasifyObj):
    "Paasify Stack Instance"

    # Default payload values when a stack entry omits keys.
    conf_default = {
        "dir": None,
        "name": None,
        "app": None,
        "tags": [],
        "tags_suffix": [],
        "tags_prefix": [],
        "vars": [],
    }

    # Child node wiring: "app" is materialized as a StackApp (with a
    # post-load hook, see node_hook_app_load below), "vars" as a
    # PaasifyConfigVars node.
    conf_children = [
        {
            "key": "app",
            "cls": StackApp,
            "action": "unset",
            "hook": "node_hook_app_load",
        },
        {
            "key": "vars",
            "cls": PaasifyConfigVars,
        },
    ]

    # JSON schema validating a stack entry: either a full mapping or a
    # plain string shortcut ("collection:app" or a name).
    conf_schema = {
        # TODO: Bug: We want a way in stacks to have random/useless values, because
        # while doing trial/errors, the parser becomes annoying. Ie, if extra key
        # starts with a `_`, then simply skip the parser error. This could be a switch
        # like `--develop`
        "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "Paasify Stack configuration",
        "default": conf_default,
        "oneOf": [
            {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "name": {
                        "type": "string",
                        "minLength": 2,
                        "title": "Name of the stack",
                        "description": "This is a stack ident, it should not be changed. If not set, it comes from the app name, and from the dir if no apps. Should not contains any special char, except - (or _). This is used to name the app (docker, dns, etc ...)",
                    },
                    "dir": {
                        "type": "string",
                        "title": "Directory where live the stack",
                        "description": "Directory where the docker-compose.run.yml file is generated. Can be used to move stack directories (cold move). Default comes from the name.",
                        "default": "$name",
                    },
                    "app": {
                        "type": "string",
                        "title": "Application call",
                        "description": "Shortcut to use an application without modifying it's default parameters. First optional part is module collection, and after the ':', it's the name/path to the aplication inside the module",
                    },
                    "tags": StackTagMgr.conf_schema,
                    "tags_prefix": StackTagMgr.conf_schema,
                    "tags_suffix": StackTagMgr.conf_schema,
                    "vars": PaasifyConfigVars.conf_schema,
                },
            },
            {
                "type": "string",
                "title": "Direct application call",
                "description": "Shortcut to use an application without modifying it's default parameters. First optional part is module collection, and after the ':', it's the name/path to the aplication inside the module",
                "pattern": "^([^:]+:)?(.*)$",
            },
        ],
    }

    # Children objects (populated in node_hook_init/node_hook_final)
    tag_manager = None
    engine = None
    prj = None

    # Stack vars (populated in node_hook_app_load)
    stack_dir = None
    stack_name = None
    prj_dir = None
    prj_ns = None

    # Vars scopes: which var kinds belong to each rendering scope,
    # consumed by render_vars/get_stack_vars.
    var_scopes = {
        "global": ["default_conf", "extra_vars_yaml", "global_conf"],
        "stack": [
            "default_conf",
            "app_yaml",
            "stack_yaml",
            "extra_vars_yaml",
            "global_conf",
            "tag_def",
            "tag_dyn",
            "stack_conf",
        ],
    }

    # CaFram functions
    # ---------------------

    def node_hook_init(self):
        "Create instance attributes"

        # Per-instance cache (used by docker_candidates)
        self._cache = {}

        # Resolve the owning project (grand-parent node) and its runtime
        project = self.get_parent().get_parent()
        self.prj = project
        self.runtime = project.runtime

        # Sanity check: the grand-parent must be the PaasifyProject node
        cls_name = project.__class__.__name__
        assert cls_name == "PaasifyProject", f"Expected PaasifyProject, got: {project}"

    def node_hook_transform(self, payload):
        "Normalize a string shortcut into a dict payload"

        # A bare string is a shortcut: "<collection>:<app>" targets an app,
        # anything else is taken as the stack name.
        if isinstance(payload, str):
            if ":" in payload:
                payload = {"app": payload}
            else:
                payload = {"name": payload}

        assert isinstance(payload, dict)
        return payload

    def node_hook_app_load(self):
        """Modify stack depending app.

        Resolves the stack name and directory from (in order of priority)
        the explicit config, the app name, or the directory path, then
        registers all derived path attributes on the instance.
        """

        # Assert name,dir,app
        stack_name = self.name
        stack_dir = self.dir
        stack_app = self.app

        # Check name first
        if not stack_name:

            # Fetch from app
            if stack_app:
                stack_name = stack_app.app_name

            # Fetch from dir: flatten path separators into underscores
            elif stack_dir:
                # stack_name = os.path.split(stack_dir)[-1]
                stack_name = "_".join(stack_dir.split(os.path.sep))

            if not stack_name:
                assert False, f"Missing name, or app or path for stack: {self}"

        # Directory defaults to the (resolved) name
        if not stack_dir:
            stack_dir = stack_name

        if "/" in stack_name:
            # TODO: Workaround, this should be handled before ...
            stack_name = stack_name.replace("/", "-")

        # Register required vars
        self.explain = False
        self.stack_name = stack_name
        self.stack_dir = stack_dir
        self.prj_ns = self.prj.config.namespace or self.prj.runtime.namespace
        self.prj_path = self.prj.runtime.root_path
        self.stack_path = os.path.join(self.prj.runtime.root_path, stack_dir)
        self.stack_dump_path = os.path.join(
            self.runtime.project_private_dir, "_dumps", stack_dir
        )
        self.ident = self.stack_name
        self.stack_path_abs = os.path.abspath(self.stack_path)

        # Check
        # NOTE(review): these regexes only constrain the FIRST character
        # (the `.*$` accepts anything after) — presumably intended to
        # validate the whole string; confirm before tightening.
        assert self.stack_name, f"Bug here, should not be empty, got: {self.stack_name}"
        assert re.search("^[a-zA-Z0-9_].*$", self.stack_name), f"Got: {self.stack_name}"
        assert re.search("^[a-zA-Z0-9_/].*$", self.stack_dir), f"Got: {self.stack_dir}"

    def node_hook_final(self):
        "Enable CLI debugging"

        # Instanciate the engine bound to this stack
        engine_payload = {
            "stack_name": f"{self.prj_ns}_{self.stack_name}",
            "stack_path": self.stack_path,
            # os.path.join(self.stack_dir, "docker-compose.run.yml"),
            "docker_file": "docker-compose.run.yml",
        }
        self.engine = self.prj.engine_cls(parent=self, payload=engine_payload)

        # Tag configuration: stack-level settings fall back on project-level
        prj_conf = self.prj.config
        tag_config = {
            "raw": self.tags or prj_conf.tags,
            "tag_prefix": ["_paasify"] + (self.tags_prefix or prj_conf.tags_prefix),
            "tag_suffix": self.tags_suffix or prj_conf.tags_suffix,
        }

        # Start managers (var_manager is created later, during assemble)
        self.tag_manager = StackTagMgr(
            parent=self, ident=self.stack_name, payload=tag_config
        )
        self.var_manager = None

        self.log.info(f"Stack config: {self.stack_name} in {self.stack_path}")

        # Default sh call arguments depend on TTY availability
        no_tty = self.prj.runtime.no_tty
        self.default_sh_args = {"_in": False} if no_tty else {"_fg": True}

    # Local functions
    # ---------------------

    def docker_candidates(self) -> list:
        """Return all docker-files candidates: local, app and tags

        Search docker-compose files in the following dirs:

          * Main docker-compose:
            * <local>/docker-compose.y?ml
            * <app>/docker-compose.y?ml
          * Additional docker-composes:
            * <local>/docker-compose.<tag>.y?ml
            * <app>/docker-compose.<tag>.y?ml

        Return the list of candidates for the stack
        """

        # Serve from cache when already computed
        cache_key = "docker_candidates"
        cached = self._cache.get(cache_key)
        if cached:
            return cached

        patterns = ["docker-compose.yml", "docker-compose.yaml"]

        # Register lookup dirs: stack dir first, then the app dir (if any)
        lookup = FileLookup()
        lookup.append(self.stack_path, patterns)
        app = self.app or None
        if app:
            lookup.append(app.get_app_path(), patterns)

        # Collect matching file paths
        results = [item["match"] for item in lookup.match()]

        # Sanity check
        for candidate in results:
            assert isinstance(candidate, str), f"Got: {candidate}"

        # No candidate at all is a user-facing error
        if not results:
            paths = ", ".join(look["path"] for look in lookup.get_lookups())
            msg = f"Can't find 'docker-compose.yml' for stack '{self._node_conf_raw}' in: {paths}"
            raise error.StackMissingDockerComposeFile(msg)

        # TODO: Test ideas: test if local_cand and app_cand are properly setup depending the pattern

        # Set in cache and return value
        self._cache[cache_key] = results
        return results

    def get_tag_plan(self) -> list:
        """
        Resolve all files associated to tags

        Builds, for the default tag and each configured tag, the pair of
        optional docker-compose and jsonnet files found in the lookup dirs.

        Returns:
            tuple: (tag entries list, docker lookup dirs, jsonnet lookup dirs)

        Raises:
            error.MissingTag: when a tag matches neither a docker nor a
                jsonnet file.
        """

        # 0. Init
        # Objects:
        app = self.app or None
        # Vars:
        stack_dir = self.stack_path
        project_jsonnet_dir = self.prj.runtime.project_jsonnet_dir
        docker_candidates = self.docker_candidates()

        # 1. Generate default tag (docker compose files only)
        tag_base = {
            "tag": None,
            "jsonnet_file": None,
            "docker_file": first(docker_candidates),
        }

        # 2. Forward to StackTagManager: Generate directory lookup for tags

        # Prepare paths
        app_jsonnet_dir = None
        dirs_docker = [stack_dir]
        dirs_jsonnet = []

        # Get docker search paths (app dir, when an app is set)
        if app:
            src = app.get_app_path()
            assert src
            dirs_docker.append(src)
            app_jsonnet_dir = src

        # Get jsonnet search paths
        for path in [stack_dir, project_jsonnet_dir, app_jsonnet_dir]:
            if path:
                dirs_jsonnet.append(path)
        for src in self.prj.sources.get_all():
            dirs_jsonnet.append(os.path.join(src.path, ".paasify", "plugins"))

        # Build tag list
        tags = self.tag_manager.get_children()
        tag_list = []
        for tag in tags:

            # Docker lookup
            pattern = [
                f"docker-compose.{tag.name}.yml",
                f"docker-compose.{tag.name}.yaml",
            ]
            lookup = FileLookup()
            self.log.trace(
                f"Looking up {', '.join(pattern)} docker-compose files in: {', '.join(dirs_docker)}"
            )
            for dir_ in dirs_docker:
                lookup.append(dir_, pattern)
            docker_cand = lookup.match()

            docker_file = None
            if len(docker_cand) > 0:
                docker_file = first(docker_cand)["match"]

            # Jsonnet lookup
            pattern = [f"{tag.name}.jsonnet"]
            lookup = FileLookup()
            self.log.trace(
                f"Looking up {', '.join(pattern)} jsonnet files in: {', '.join(dirs_jsonnet)}"
            )
            for dir_ in dirs_jsonnet:
                lookup.append(dir_, pattern)
            # FIX: match once after all dirs are registered. The previous code
            # called lookup.match() inside the loop, re-scanning the whole
            # lookup after every append for the same final result.
            jsonnet_cand = lookup.match()
            jsonnet_file = None
            if len(jsonnet_cand) > 0:
                jsonnet_file = first(jsonnet_cand)["match"]

            self.log.info(f"Tag '{tag.name}' matches: {docker_file}, {jsonnet_file}")
            ret = {
                "tag": tag,
                "jsonnet_file": jsonnet_file,
                "docker_file": docker_file,
            }
            tag_list.append(ret)

            # Report error to user on missing tags
            if not docker_file and not jsonnet_file:
                msg = f"Can't find '{tag.name}' tag for '{self.stack_name}' stack"
                raise error.MissingTag(msg)

        # 3. Return result list
        results = []
        results.append(tag_base)
        results.extend(tag_list)
        return results, dirs_docker, dirs_jsonnet

    def _gen_conveniant_vars(self, docker_file, tag_names=None) -> dict:
        """Generate default core variables.

        Loads the given docker-compose file to derive the default service
        and network names, then builds the dict of built-in paasify vars.
        Each `_`-prefixed var is mirrored without the prefix (the prefixed
        forms are deprecated).

        Args:
            docker_file: path of the main docker-compose file to inspect.
            tag_names: optional list of tag names applied to the stack.
        """

        # Extract stack config
        tag_names = tag_names or []
        dfile = anyconfig.load(docker_file, ac_ordered=True, ac_parser="yaml")
        default_service = first(dfile.get("services", ["default"]))
        default_network = first(dfile.get("networks", ["default"]))

        # FIX: deduplicated asserts (prj_path was checked three times)
        assert isinstance(self.prj_ns, str)
        assert isinstance(self.prj_path, str)
        assert isinstance(self.stack_name, str)
        assert self.stack_path_abs.startswith("/")

        # Build default (only primitives)
        result = {
            "paasify_sep": "-",
            "paasify_sep_dir": os.sep,
            # See: https://www.docker.com/blog/announcing-compose-v2-general-availability/
            "paasify_sep_net": "_",
            "_prj_path": self.prj_path,
            "_prj_namespace": self.prj_ns,  # deprecated because too long !
            "_prj_ns": self.prj_ns,
            "_prj_domain": to_domain(self.prj_ns),
            "_prj_stack_path": self.stack_path,
            # Colon is used here for easier to parsing for later ...
            "_prj_stack_tags": f":{':'.join(tag_names)}:",
            "_stack_name": self.stack_name,
            "_stack_path_abs": self.stack_path_abs,
            "_stack_network": default_network,
            "_stack_service": default_service,
            # To report below as well
            "_stack_app_name": None,
            "_stack_app_dir": None,
            "_stack_app_path": None,
            "_stack_collection_app_path": None,
        }

        # Override app-related vars when an app is attached
        app = self.app
        if app:
            extra = {
                "_stack_app_name": os.path.basename(app.app_name),
                "_stack_app_dir": app.app_name,
                "_stack_app_path": app.get_app_path(),
                # TODO: Broken ... use app.src instead ?
                # "_stack_collection_app_path": self.app.collection_dir,
            }
            result.update(extra)

        # Remove or duplicate vars with _
        for key in list(result.keys()):
            if key.startswith("_"):
                new_key = key[1:]
                result[new_key] = result[key]
                # DEPRECATED: We want to remove most or all _ vars !
                # del result[key]

        return result

    @property
    def docker_vars_lookup(self) -> list:
        "Return the lookup configuration for vars.yml location"

        lookup = FileLookup()

        # Stack-level vars.yml (owned by the user)
        lookup.append(
            path=self.stack_path,
            pattern=["vars.yml", "vars.yaml"],
            kind="stack",
            owner="user",
        )

        # App-level vars.yml is inserted BEFORE the stack entry, so the
        # stack config can override the app defaults
        app = self.app
        if app:
            app_dir = app.get_app_path()
            assert app_dir, "Missing app name!"
            lookup.insert(
                path=app_dir,
                pattern=["vars.yml", "vars.yaml"],
                kind="app",
                owner="app",
            )

        return lookup

    @property
    def extra_vars_lookups(self) -> list:
        "Return the lookup configuration for extra_vars location"

        # NOTE(review): the project config is fetched from the node tree
        # here, while other methods use self.prj.config — presumably the
        # same object; confirm before unifying.
        prj_config = self.get_parents()[-2].config
        runtime = self.prj.runtime

        # extra_vars may be a single path or a list of paths
        entries = prj_config.extra_vars.get_value()
        if not isinstance(entries, list):
            entries = [entries]

        lookup = FileLookup()
        for entry in entries:
            ref = FileReference(entry, root=runtime.root_path)
            dir_, file_ = os.path.split(ref.path())
            lookup.append(dir_, [file_], kind="extra_vars", owner="user")
        return lookup

    def render_vars(
        self,
        hint=None,
        parse=True,
        skip_undefined=False,
        scope=None,
        parse_vars=None,
        varmgr=None,
        explain=None,
    ):
        """Return parsed vars for a given scope.

        `scope` may be a comma-separated string, a list of scope names,
        or a callable selector taking a var and returning a bool.
        """

        assert hint, f"Missing valid hint for render_vars call for {self} ..."

        # Resolve defaults
        parse_vars = parse_vars or {}
        if not isinstance(explain, bool):
            explain = self.explain
        varmgr = varmgr or self.var_manager
        assert varmgr, "Var manager is not initialized or provided !"

        # Normalize scope into a list of scope names, when applicable
        scopes = None
        if isinstance(scope, str):
            scopes = scope.split(",")
        elif isinstance(scope, list):
            scopes = scope

        # Build the selector: a name list becomes a filter function,
        # a callable scope is used as-is
        if scopes:

            def select(var):
                return var.scope in scopes

        else:
            select = scope

        self.log.trace(
            f"Environment rendering asked for scope: parse={parse}, hint={hint}"
        )
        try:
            result = varmgr.render_env(
                parse=parse,
                parse_vars=parse_vars,
                skip_undefined=skip_undefined,
                select=select,
                hint=hint,
            )
        except error.UndeclaredVariable as err:
            # Help the user to debug undeclared variables
            if explain is True:
                self.log.notice("Explain current vars")
                self.var_manager.explain()
            else:
                self.log.notice("Use --explain flag to get current vars")
            raise err

        return result

    def get_stack_vars(self, sta, all_tags, jsonnet_lookup_dirs, extra_user_vars=None):
        """Build and return the VarMgr populated with all var sources.

        Layers the variable sources by priority (core defaults, extra_vars
        files, vars.yml lookups, global conf, stack conf, then per-tag conf
        and jsonnet plugin vars) into a fresh VarMgr instance.
        """

        # 0. Get objects we need to query
        varmgr = VarMgr(parent=self, ident=f"{self.stack_name}")

        globvars = self.prj.config.vars
        localvars = self.vars
        extra_user_vars = extra_user_vars or {}

        # 1. Generate data structures
        # The first entry of all_tags is the base tag (main docker-compose)
        docker_file = all_tags[0]["docker_file"]
        tag_names = uniq([tag["tag"].name for tag in all_tags if tag["tag"]])

        vars_default = self._gen_conveniant_vars(
            docker_file=docker_file, tag_names=tag_names
        )
        stack_name = vars_default["_stack_name"]

        vars_global = {var.name: var.value for var in globvars.get_vars_list()}
        vars_user = {var.name: var.value for var in localvars.get_vars_list()}

        # 2. Create a varmgr VarMgr and inject configs

        # Order matters here:
        #   - Add core vars, the one starting with _
        #   - Add extra_vars, from external files
        #   - Add vars from app/vars.yml, then stack/vars.yml
        #   - Add global conf vars from paasify.yml
        #   - Add stack conf vars from paasify.yml
        varmgr.add_vars(
            vars_default,
            10000,
            scope="global",
            kind="default_conf",
            source="core",
            file="paasify.py",
            owner="paasify",
        )
        varmgr.add_vars_from_lookup(
            self.extra_vars_lookups, 50000, fail_on_missing=True, scope="global"
        )
        varmgr.add_vars_from_lookup(self.docker_vars_lookup, 20000, scope="stack")
        varmgr.add_vars(
            vars_global,
            70000,
            scope="global",
            kind="global_conf",
            source="core",
            file="paasify.yml:config.vars",
            owner="user",
        )
        varmgr.add_vars(
            vars_user,
            80000,
            scope="stack",
            kind="stack_conf",
            source="core",
            file=f"paasify.yml:stacks[{stack_name}]vars",
            owner="user",
        )

        # 4. Add config for tags as well
        cand_index = -2  # TODO: we sould remove -2 if app or -1 if not
        tag_instances = []
        prio_index = 0
        for cand in all_tags:
            cand_index += 1
            prio_index += 100

            # Fetch the tag
            # --------------------
            tag = cand.get("tag")
            if not tag:
                continue
            tag_vars = tag.vars or {}
            tag_name = tag.name

            # Tag vars only make sense for jsonnet tags; warn otherwise
            jsonnet_file = cand.get("jsonnet_file")
            if not jsonnet_file:
                # Throw a user warning about wrong config
                if len(tag_vars) > 0:
                    msg = f"Tag vars are only supported for jsonnet tags: {tag_name}: {tag_vars}"
                    self.log.warning(msg)
                continue

            # Prepare loop metadata: make the tag instance name unique
            # (tag0, tag1, ...) when the same tag appears multiple times
            # --------------------
            tag_index = 0
            tag_inst = f"{tag_name}{tag_index}"
            while tag_inst in tag_instances:
                tag_index += 1
                tag_inst = f"{tag_name}{tag_index}"
            tag_instances.append(tag_inst)

            # Assemble context and conf
            # --------------------
            loop_vars = {}
            loop_vars.update(tag_vars)

            varmgr.add_vars(
                loop_vars,
                90000 + prio_index,
                # scope="tag",
                scope=f"tag_{tag_inst}",
                kind="tag_conf",
                source=tag_inst,
                file=f"paasify.yml:stacks[{stack_name}]tags[{tag_inst}]",
                owner="user",
            )

            # 3.2 Execute jsonnet plugin var calls
            # --------------------
            if tag_index > 0:
                # We only process ONE time the filter of each kind
                continue

            # Render the current global+stack vars to feed the plugin
            ctx = self.render_vars(
                scope="global,stack",
                parse=False,
                hint=f"jsonnet scoped vars: {tag_name}",
                varmgr=varmgr,
            )

            tmp2 = sta.process_jsonnet_exec(
                jsonnet_file,
                "plugin_vars",
                {"args": ctx},
                import_dirs=jsonnet_lookup_dirs,
            )
            # Plugins must return both "def" (defaults) and "dyn" (dynamic)
            try:
                var_def = tmp2["def"]
                var_dyn = tmp2["dyn"]
            except KeyError as err:
                self.log.error(
                    f"Could not execute plugin '{jsonnet_file}', please check the plugin is working as expected, especially ensure variable 'metadata.ident' is correctly set to the plugin name"
                )
                self.log.debug(f"Plugin returned: {tmp2}")
                raise error.JsonnetBuildFailed(err)

            varmgr.add_vars(
                var_def,
                30000 + prio_index,
                scope="stack",
                owner=tag_name,
                kind="tag_def",
                source=tag_name,
                file=jsonnet_file,
            )
            varmgr.add_vars(
                var_dyn,
                40000 + prio_index,
                scope="stack",
                owner=tag_name,
                kind="tag_dyn",
                source=tag_name,
                file=jsonnet_file,
            )

        return varmgr

    def log_extra_payloads(self, payload, msg):
        """Dump to log large chunks of data.

        No-op unless payload dumping (self.dump_data_log) is enabled.
        """

        if not self.dump_data_log:
            return

        # FIX: these two strings were missing the f-prefix, so the literal
        # text "= {msg}" and "{pformat(payload)}" was logged instead of the
        # actual message and payload.
        self.log.trace(f"= {msg}")
        self.log.trace("=" * 60)
        self.log.trace(f"{pformat(payload)}")
        self.log.trace("=" * 60)

    def dump_diff_stage1(self, dumper, varmgr, glob_vars, stack_vars, all_tags):
        "Report build diff stage1"

        # Global scope report
        glob_report = "\n".join(
            [
                "==== Global Scope: Vars",
                varmgr.explain(
                    scope=lambda var: var.scope in ["global"], as_string=True
                ),
                "\n==== Global Scope: Parsed",
                to_yaml(glob_vars),
            ]
        )
        dumper.dump("0-glob-env.txt", glob_report)

        # Stack scope report
        stack_report = "\n".join(
            [
                "==== Stack Scope: Tags",
                pformat(all_tags),
                "\n==== Stack Scope: Vars",
                varmgr.explain(
                    scope=lambda var: var.scope in ["global", "stack"],
                    as_string=True,
                ),
                "\n==== Stack Scope: Parsed",
                to_yaml(stack_vars),
            ]
        )
        dumper.dump("0-stack-env.txt", stack_report)

    def dump_diff_stage2(
        self, dumper, varmgr, loop_vars, scope_filter, tag_inst, cand_index
    ):
        "Report build diff stage2"

        # One report per jsonnet tag instance
        sections = [
            "==== Jsonnet Scope: Explain",
            varmgr.explain(scope=scope_filter, as_string=True),
            "\n==== Jsonnet Scope: Vars",
            to_yaml(loop_vars),
        ]
        dumper.dump(f"2-{cand_index:02d}-{tag_inst}-env.txt", "\n".join(sections))

    def assemble(self, vars_only=False, dump_payloads=False, explain=None):
        """Generate docker-compose.run.yml and parse it with jsonnet

        vars_only: If False, do nothing, if True or non empty list, just dump the variables
        explain: Show the explainer and/or build diff

        """

        # 1. Prepare assemble context
        # -------------------
        self.explain = explain if isinstance(explain, bool) else self.explain
        sta = StackAssembler(parent=self, ident=f"{self.stack_name}")
        all_tags, docker_lookup_dirs, jsonnet_lookup_dirs = self.get_tag_plan()
        self.var_manager = self.get_stack_vars(sta, all_tags, jsonnet_lookup_dirs)

        # 2. Prepare debugging tools
        # -------------------
        dump_payload_log = self.runtime.dump_payload_log
        self.dump_data_log = dump_payloads
        # NOTE: dumper is only created here; all later `if explain:` uses are
        # safe because the vars_only branch returns before reaching them.
        if explain and not vars_only:
            dumper = StackDumper(self.stack_dump_path, enabled=True)

        # 3. Prepare scopes
        # -------------------
        # Global vars first, then stack vars parsed against them
        glob_vars = self.render_vars(
            scope="global",
            hint="global scoped variables",
            parse=True,
        )
        stack_vars = self.render_vars(
            scope="global,stack",
            parse=True,
            parse_vars=glob_vars,
            hint="stack scoped variables",
        )

        # 4. Intermediate debugging tools
        # -------------------
        self.log_extra_payloads(stack_vars, "Dump of docker environment vars")

        if explain and not vars_only:
            self.dump_diff_stage1(
                dumper, self.var_manager, glob_vars, stack_vars, all_tags
            )

        # Early exit: only report variables, no file generation
        if vars_only:
            if explain:
                self.var_manager.explain(filter_vars=vars_only)

            out = dict(stack_vars)
            if isinstance(vars_only, list):
                out = {key: stack_vars.get(key) for key in vars_only}
            return out

        # 2. Build docker-compose
        # -------------------
        docker_run_payload = sta.assemble_docker_compose(
            all_tags,
            self.engine,
            env=stack_vars,
            dump_payload_log=dump_payload_log,
        )

        if explain:
            dumper.dump("1-docker-compose.yml", docker_run_payload, fmt="yaml")

        # 3. Assemble jsonnet tags
        # -------------------
        cand_index = (
            -2
        )  # We start at minus 2 here, because there is the 2 firsts tags are hidden
        tag_instances = []
        tag_names = []

        # Loop over each tags
        for cand in all_tags:
            cand_index += 1

            # 3.0 Init loop
            # --------------------

            # Check tag infos
            tag = cand.get("tag")
            tag_name = tag.name if tag else "_paasify"

            # Check conditions: only jsonnet tags transform the payload
            jsonnet_file = cand.get("jsonnet_file")
            if not jsonnet_file:
                continue

            # 3.1 Reload var context if overriden
            # --------------------
            # Make the tag instance name unique (tag0, tag1, ...) when the
            # same tag appears multiple times
            tag_index = 0
            tag_inst = f"{tag_name}{tag_index}"
            while tag_inst in tag_instances:
                tag_index += 1
                tag_inst = f"{tag_name}{tag_index}"
            tag_instances.append(tag_inst)
            if tag_name not in tag_names:
                tag_names.append(tag_name)

            tag_suffix = f"{tag_index}" if tag_index != 0 else ""
            self.log.info(f"Apply jsonnet tag '{tag_inst}': {jsonnet_file}")

            # 3.2 Generate loop vars
            # --------------------
            scope = f"global,stack,tag_{tag_inst}"

            def scope_filter(var):
                # pylint: disable=cell-var-from-loop
                return var.scope in scope.split(",") and var.owner != tag_name

            loop_vars = self.render_vars(
                scope=scope_filter,
                parse=True,
                parse_vars=stack_vars,
                hint=f"jsonnet transform vars: {tag_inst}",
            )

            # Inject per-iteration metadata for the plugin
            loop_vars.update(
                {
                    "tag_cand": cand_index,
                    "tag_index": tag_index,
                    "tag_instance": tag_inst,
                    "tag_suffix": tag_suffix,
                }
            )

            # Logging
            self.log_extra_payloads(
                loop_vars, f"Dump of vars before '{tag_inst}' jsonnet execution"
            )
            if explain:
                self.dump_diff_stage2(
                    dumper,
                    self.var_manager,
                    loop_vars,
                    scope_filter,
                    tag_inst,
                    cand_index,
                )

            # 3.3 Prepare jsonnet call
            # --------------------
            # Each jsonnet tag transforms the docker payload in sequence
            params = {
                "args": loop_vars,
                "docker_data": docker_run_payload,
            }
            docker_run_payload = sta.process_jsonnet_exec(
                jsonnet_file,
                "docker_transform",
                params,
                import_dirs=jsonnet_lookup_dirs,
            )
            if explain:
                dumper.dump(
                    f"2-{cand_index:02d}-{tag_inst}-out.yml",
                    docker_run_payload,
                    fmt="yml",
                )

        # 4. Write output file
        # -------------------

        if explain:
            dumper.dump("3-docker.yml", docker_run_payload, fmt="yml")

        # Prepare docker-file output directory
        if not os.path.isdir(self.stack_path):
            self.log.info(f"Create missing directory: {self.stack_path}")
            os.makedirs(self.stack_path)

        # Save the final docker-compose.run.yml file
        outfile = os.path.join(self.stack_path, "docker-compose.run.yml")
        self.log.info(f"Writing docker-compose file: {outfile}")
        output = to_yaml(docker_run_payload)
        write_file(outfile, output)

        if explain:
            dumper.show_diff()

        # 5. Prepare environment
        # -------------------
        # This is a first a basic implementation of apps volumes with permissions
        # TOFIX: Permission change will apply the same permissions on all volumes in a blind way
        # Ie: Permission for mysql containers is not the same as the app itself.
        volumes = docker_run_payload.get("volumes", {})
        uid = int(stack_vars.get("app_puid", "-1"))
        gid = int(stack_vars.get("app_pgid", "-1"))
        for vol_name, vol_def in volumes.items():
            driver = vol_def.get("driver")
            driver_opts = vol_def.get("driver_opts")
            if driver == "local" and driver_opts:
                device = driver_opts.get("device")
                if device and not os.path.exists(device):
                    self.log.info(
                        f"Create volume directory '{vol_name}' with owner '{uid}:{gid}': {device}"
                    )
                    os.makedirs(device)
                    os.chown(device, uid, gid)

    def explain_tags(self):
        # FIX: docstring typo "hos" -> "how"
        "Explain how tags are processed on stack"

        print(f"  Plugins for stack: {self.ident}")
        tag_config, docker_lookup_dirs, jsonnet_lookup_dirs = self.get_tag_plan()

        # Display lookup paths
        print("")
        print(2 * "  " + "Stack lookups:")
        print(3 * "  " + "Docker-compose paths:")
        for path in docker_lookup_dirs:
            print(4 * "  " + f"- {path}")
        print(3 * "  " + "Jsonnet paths:")
        for path in jsonnet_lookup_dirs:
            # FIX: was indented 3 levels, inconsistent with the parallel
            # docker-compose path listing above
            print(4 * "  " + f"- {path}")

        # Display found files
        print("")
        print(2 * "  " + "Stack components:")
        print(3 * "  " + "Docker-compose files:")
        # The base entry (tag=None) holds the main docker-compose file
        matches = first([match for match in tag_config if match["tag"] is None])
        if matches:
            src = matches["docker_file"]
            basename = os.path.basename(src)
            print(4 * "  " + f"* {basename:<20} {src}")
        matches = [match for match in tag_config if match["docker_file"] is not None]
        if matches:
            for match in matches:
                tag = match["tag"]
                if tag:
                    src = match["docker_file"]
                    print(4 * "  " + f"- {tag.name:<20} {src}")

        print(3 * "  " + "Jsonnet tags files:")
        matches = [match for match in tag_config if match["jsonnet_file"] is not None]
        if matches:
            for match in matches:
                tag = match["tag"]
                if tag:
                    src = match["jsonnet_file"]
                    print(4 * "  " + f"- {tag.name:<20} {src}")

    def gen_doc(self, output_dir=None):
        "Generate documentation"

        matches, docker_lookup_dirs, jsonnet_lookup_dirs = self.get_tag_plan()

        # 3. Show jsonschema
        print("\n    Plugins jsonschema:")
        for match in matches:

            tag = match.get("tag")
            if not tag:
                continue

            file = match.get("jsonnet_file")
            if not file:
                continue

            # Create output dir
            dest_dir = os.path.join(output_dir, tag.name)
            if not os.path.isdir(dest_dir):
                os.makedirs(dest_dir)

            print(f"        # {tag.name}: {file}")
            out = self.process_jsonnet(file, "metadata", None)
            tag_meta = out["metadata"]
            tag_schema = tag_meta.get("jsonschema")
            # pprint (tag_meta)
            if "jsonschema" in tag_meta:
                del tag_meta["jsonschema"]

            dest_schema = os.path.join(dest_dir, "jsonschema")
            if tag_schema:
                print(f"Generated jsonschema files in: {dest_schema}.[json|yml]")
                write_file(dest_schema + ".json", to_json(tag_schema))
                write_file(dest_schema + ".yml", to_yaml(tag_schema))

            # Create HTML documentation
            if ENABLE_JSON_SCHEMA:

                fname = "web.html"
                dest_html = os.path.join(dest_dir, fname)
                print(f"Generated HTML doc in: {dest_html}")
                config = GenerationConfiguration(
                    copy_css=True,
                    description_is_markdown=True,
                    examples_as_yaml=True,
                    footer_show_time=False,
                    expand_buttons=True,
                    show_breadcrumbs=False,
                )
                generate_from_filename(dest_schema + ".json", dest_html, config=config)

                # /schema_doc/paasify_yml_schema.html
                # /plugin_api_doc/{tag.name}/web.html
                markdown_doc = f"""
# {tag.name}

Documentationfor tag: `{tag.name}`

## Informations

``` yaml
{to_yaml(tag_meta)}
```

## Tag documentation

<iframe scrolling="yes" src="/plugins_apidoc/{tag.name}/{fname}" style="width: 100vw; height: 70vh; overflow: auto; border: 0px;">
</iframe>


                """
                dest_md = os.path.join(dest_dir, "markdown.md")
                write_file(dest_md, markdown_doc)
                print(f"Generated Markdown doc in: {dest_md}")
docker_vars_lookup: list property

Return the lookup configuration for vars.yml location

extra_vars_lookups: list property

Return the lookup configuration for extra_vars location

assemble(vars_only=False, dump_payloads=False, explain=None)

Generate docker-compose.run.yml and parse it with jsonnet

vars_only: If False, run the full assembly; if True or a non-empty list, only dump the (optionally filtered) variables.
explain: Show the explainer and/or the build diff.

Source code in paasify/stacks.py
def assemble(self, vars_only=False, dump_payloads=False, explain=None):
    """Generate docker-compose.run.yml and parse it with jsonnet

    vars_only: If False, run the full assembly; if True or a non-empty list,
        only return the parsed variables (filtered to the list if given)
    dump_payloads: If True, trace intermediate payloads into the log
    explain: Show the explainer and/or build diff

    Returns the variables dict when vars_only is set, None otherwise.
    """

    # 1. Prepare assemble context
    # -------------------
    self.explain = explain if isinstance(explain, bool) else self.explain
    sta = StackAssembler(parent=self, ident=f"{self.stack_name}")
    all_tags, docker_lookup_dirs, jsonnet_lookup_dirs = self.get_tag_plan()
    self.var_manager = self.get_stack_vars(sta, all_tags, jsonnet_lookup_dirs)

    # 2. Prepare debugging tools
    # -------------------
    dump_payload_log = self.runtime.dump_payload_log
    self.dump_data_log = dump_payloads
    # NOTE: dumper only exists on this branch; the later bare `if explain:`
    # uses are safe because the vars_only path returns before reaching them
    if explain and not vars_only:
        dumper = StackDumper(self.stack_dump_path, enabled=True)

    # 3. Prepare scopes
    # -------------------
    # Global vars are rendered first, then fed into the stack-scope parse
    glob_vars = self.render_vars(
        scope="global",
        hint="global scoped variables",
        parse=True,
    )
    stack_vars = self.render_vars(
        scope="global,stack",
        parse=True,
        parse_vars=glob_vars,
        hint="stack scoped variables",
    )

    # 4. Intermediate debugging tools
    # -------------------
    self.log_extra_payloads(stack_vars, "Dump of docker environment vars")

    if explain and not vars_only:
        self.dump_diff_stage1(
            dumper, self.var_manager, glob_vars, stack_vars, all_tags
        )

    # Early exit: only report variables, skip the whole build
    if vars_only:
        if explain:
            self.var_manager.explain(filter_vars=vars_only)

        out = dict(stack_vars)
        if isinstance(vars_only, list):
            out = {key: stack_vars.get(key) for key in vars_only}
        return out

    # 5. Build docker-compose
    # -------------------
    docker_run_payload = sta.assemble_docker_compose(
        all_tags,
        self.engine,
        env=stack_vars,
        dump_payload_log=dump_payload_log,
    )

    if explain:
        dumper.dump("1-docker-compose.yml", docker_run_payload, fmt="yaml")

    # 6. Assemble jsonnet tags
    # -------------------
    cand_index = (
        -2
    )  # Start at -2 because the first 2 tags of the plan are hidden
    tag_instances = []
    tag_names = []

    # Loop over each tags
    for cand in all_tags:
        cand_index += 1

        # 6.0 Init loop
        # --------------------

        # Check tag infos
        tag = cand.get("tag")
        tag_name = tag.name if tag else "_paasify"

        # Tags without a jsonnet transform have nothing to do here
        jsonnet_file = cand.get("jsonnet_file")
        if not jsonnet_file:
            continue

        # 6.1 Compute a unique instance name for repeated tags
        # --------------------
        tag_index = 0
        tag_inst = f"{tag_name}{tag_index}"
        while tag_inst in tag_instances:
            tag_index += 1
            tag_inst = f"{tag_name}{tag_index}"
        tag_instances.append(tag_inst)
        if tag_name not in tag_names:
            tag_names.append(tag_name)

        tag_suffix = f"{tag_index}" if tag_index != 0 else ""
        self.log.info(f"Apply jsonnet tag '{tag_inst}': {jsonnet_file}")

        # 6.2 Generate loop vars
        # --------------------
        scope = f"global,stack,tag_{tag_inst}"

        def scope_filter(var):
            # pylint: disable=cell-var-from-loop
            return var.scope in scope.split(",") and var.owner != tag_name

        loop_vars = self.render_vars(
            scope=scope_filter,
            parse=True,
            parse_vars=stack_vars,
            hint=f"jsonnet transform vars: {tag_inst}",
        )

        # Expose per-iteration metadata to the jsonnet transform
        loop_vars.update(
            {
                "tag_cand": cand_index,
                "tag_index": tag_index,
                "tag_instance": tag_inst,
                "tag_suffix": tag_suffix,
            }
        )

        # Logging
        self.log_extra_payloads(
            loop_vars, f"Dump of vars before '{tag_inst}' jsonnet execution"
        )
        if explain:
            self.dump_diff_stage2(
                dumper,
                self.var_manager,
                loop_vars,
                scope_filter,
                tag_inst,
                cand_index,
            )

        # 6.3 Run the jsonnet transform over the current payload
        # --------------------
        params = {
            "args": loop_vars,
            "docker_data": docker_run_payload,
        }
        docker_run_payload = sta.process_jsonnet_exec(
            jsonnet_file,
            "docker_transform",
            params,
            import_dirs=jsonnet_lookup_dirs,
        )
        if explain:
            dumper.dump(
                f"2-{cand_index:02d}-{tag_inst}-out.yml",
                docker_run_payload,
                fmt="yml",
            )

    # 7. Write output file
    # -------------------

    if explain:
        dumper.dump("3-docker.yml", docker_run_payload, fmt="yml")

    # Prepare docker-file output directory
    if not os.path.isdir(self.stack_path):
        self.log.info(f"Create missing directory: {self.stack_path}")
        os.makedirs(self.stack_path)

    # Save the final docker-compose.run.yml file
    outfile = os.path.join(self.stack_path, "docker-compose.run.yml")
    self.log.info(f"Writing docker-compose file: {outfile}")
    output = to_yaml(docker_run_payload)
    write_file(outfile, output)

    if explain:
        dumper.show_diff()

    # 8. Prepare environment
    # -------------------
    # This is a first a basic implementation of apps volumes with permissions
    # TOFIX: Permission change will apply the same permissions on all volumes in a blind way
    # Ie: Permission for mysql containers is not the same as the app itself.
    volumes = docker_run_payload.get("volumes", {})
    uid = int(stack_vars.get("app_puid", "-1"))
    gid = int(stack_vars.get("app_pgid", "-1"))
    for vol_name, vol_def in volumes.items():
        driver = vol_def.get("driver")
        driver_opts = vol_def.get("driver_opts")
        if driver == "local" and driver_opts:
            device = driver_opts.get("device")
            # Only create missing local bind devices; existing ones are untouched
            if device and not os.path.exists(device):
                self.log.info(
                    f"Create volume directory '{vol_name}' with owner '{uid}:{gid}': {device}"
                )
                os.makedirs(device)
                os.chown(device, uid, gid)
docker_candidates()

Return all docker-files candidates: local, app and tags

Search docker-compose files in the following dirs:
  • Main docker-compose:
    • <local>/docker-compose.y?ml
    • <app>/docker-compose.y?ml
  • Additional docker-composes:
    • <local>/docker-compose.<tag>.y?ml
    • <app>/docker-compose.<tag>.y?ml

Return the list of candidates for the stack

Source code in paasify/stacks.py
def docker_candidates(self) -> list:
    """Return all docker-files candidates: local, app and tags

    Search docker-compose files in the following dirs:

      * Main docker-compose:
        * <local>/docker-compose.y?ml
        * <app>/docker-compose.y?ml
      * Additional docker-composes:
        * <local>/docker-compose.<tag>.y?ml
        * <app>/docker-compose.<tag>.y?ml

    Return the list of candidates for the stack
    """

    # Serve from the per-instance cache when already computed
    cache_key = "docker_candidates"
    cached = self._cache.get(cache_key)
    if cached:
        return cached

    # Register lookup locations: stack dir first, then the app dir if any
    patterns = ["docker-compose.yml", "docker-compose.yaml"]
    lookup = FileLookup()
    lookup.append(self.stack_path, patterns)
    if self.app:
        lookup.append(self.app.get_app_path(), patterns)

    # Collect matching file paths
    candidates = [item["match"] for item in lookup.match()]

    # Sanity check: every candidate must be a plain path string
    for candidate in candidates:
        assert isinstance(candidate, str), f"Got: {candidate}"

    # A stack without any docker-compose file is a configuration error
    if not candidates:
        paths = ", ".join(look["path"] for look in lookup.get_lookups())
        msg = f"Can't find 'docker-compose.yml' for stack '{self._node_conf_raw}' in: {paths}"
        raise error.StackMissingDockerComposeFile(msg)

    # TODO: Test ideas: test if local_cand and app_cand are properly setup depending the pattern

    # Store in cache and return
    self._cache[cache_key] = candidates
    return candidates
dump_diff_stage1(dumper, varmgr, glob_vars, stack_vars, all_tags)

Report build diff stage1

Source code in paasify/stacks.py
def dump_diff_stage1(self, dumper, varmgr, glob_vars, stack_vars, all_tags):
    "Report build diff stage1"

    # Global scope: variable explanation followed by the parsed result
    report = [
        "==== Global Scope: Vars",
        varmgr.explain(scope=lambda var: var.scope in ["global"], as_string=True),
        "\n==== Global Scope: Parsed",
        to_yaml(glob_vars),
    ]
    dumper.dump("0-glob-env.txt", "\n".join(report))

    # Stack scope: tag plan, variable explanation and parsed result
    report = [
        "==== Stack Scope: Tags",
        pformat(all_tags),
        "\n==== Stack Scope: Vars",
        varmgr.explain(
            scope=lambda var: var.scope in ["global", "stack"], as_string=True
        ),
        "\n==== Stack Scope: Parsed",
        to_yaml(stack_vars),
    ]
    dumper.dump("0-stack-env.txt", "\n".join(report))
dump_diff_stage2(dumper, varmgr, loop_vars, scope_filter, tag_inst, cand_index)

Report build diff stage2

Source code in paasify/stacks.py
def dump_diff_stage2(
    self, dumper, varmgr, loop_vars, scope_filter, tag_inst, cand_index
):
    "Report build diff stage2"
    # Per-tag jsonnet scope: explanation then the rendered variables
    report = "\n".join(
        [
            "==== Jsonnet Scope: Explain",
            varmgr.explain(scope=scope_filter, as_string=True),
            "\n==== Jsonnet Scope: Vars",
            to_yaml(loop_vars),
        ]
    )
    dumper.dump(f"2-{cand_index:02d}-{tag_inst}-env.txt", report)
explain_tags()

Explain how tags are processed on a stack

Source code in paasify/stacks.py
def explain_tags(self):
    "Explain how tags are processed on stack"

    print(f"  Plugins for stack: {self.ident}")
    tag_config, docker_lookup_dirs, jsonnet_lookup_dirs = self.get_tag_plan()

    # Display lookup paths
    print("")
    print(2 * "  " + "Stack lookups:")
    print(3 * "  " + "Docker-compose paths:")
    for path in docker_lookup_dirs:
        print(4 * "  " + f"- {path}")
    print(3 * "  " + "Jsonnet paths:")
    for path in jsonnet_lookup_dirs:
        # BUGFIX: list items were printed at indent 3 (same level as the
        # header), unlike the docker-compose paths above; align at 4
        print(4 * "  " + f"- {path}")

    # Display found files
    print("")
    print(2 * "  " + "Stack components:")
    print(3 * "  " + "Docker-compose files:")
    # The tag-less entry (tag is None) holds the main docker-compose file
    main_match = first([match for match in tag_config if match["tag"] is None])
    if main_match:
        src = main_match["docker_file"]
        basename = os.path.basename(src)
        print(4 * "  " + f"* {basename:<20} {src}")
    # Then one line per tag that provides a docker-compose override
    for match in [item for item in tag_config if item["docker_file"] is not None]:
        tag = match["tag"]
        if tag:
            src = match["docker_file"]
            print(4 * "  " + f"- {tag.name:<20} {src}")

    print(3 * "  " + "Jsonnet tags files:")
    # And one line per tag that provides a jsonnet transform
    for match in [item for item in tag_config if item["jsonnet_file"] is not None]:
        tag = match["tag"]
        if tag:
            src = match["jsonnet_file"]
            print(4 * "  " + f"- {tag.name:<20} {src}")
gen_doc(output_dir=None)

Generate documentation

Source code in paasify/stacks.py
    def gen_doc(self, output_dir=None):
        "Generate documentation"

        matches, docker_lookup_dirs, jsonnet_lookup_dirs = self.get_tag_plan()

        # 3. Show jsonschema
        print("\n    Plugins jsonschema:")
        for match in matches:

            tag = match.get("tag")
            if not tag:
                continue

            file = match.get("jsonnet_file")
            if not file:
                continue

            # Create output dir
            dest_dir = os.path.join(output_dir, tag.name)
            if not os.path.isdir(dest_dir):
                os.makedirs(dest_dir)

            print(f"        # {tag.name}: {file}")
            out = self.process_jsonnet(file, "metadata", None)
            tag_meta = out["metadata"]
            tag_schema = tag_meta.get("jsonschema")
            # pprint (tag_meta)
            if "jsonschema" in tag_meta:
                del tag_meta["jsonschema"]

            dest_schema = os.path.join(dest_dir, "jsonschema")
            if tag_schema:
                print(f"Generated jsonschema files in: {dest_schema}.[json|yml]")
                write_file(dest_schema + ".json", to_json(tag_schema))
                write_file(dest_schema + ".yml", to_yaml(tag_schema))

            # Create HTML documentation
            if ENABLE_JSON_SCHEMA:

                fname = "web.html"
                dest_html = os.path.join(dest_dir, fname)
                print(f"Generated HTML doc in: {dest_html}")
                config = GenerationConfiguration(
                    copy_css=True,
                    description_is_markdown=True,
                    examples_as_yaml=True,
                    footer_show_time=False,
                    expand_buttons=True,
                    show_breadcrumbs=False,
                )
                generate_from_filename(dest_schema + ".json", dest_html, config=config)

                # /schema_doc/paasify_yml_schema.html
                # /plugin_api_doc/{tag.name}/web.html
                markdown_doc = f"""
# {tag.name}

Documentationfor tag: `{tag.name}`

## Informations

``` yaml
{to_yaml(tag_meta)}
```

## Tag documentation

<iframe scrolling="yes" src="/plugins_apidoc/{tag.name}/{fname}" style="width: 100vw; height: 70vh; overflow: auto; border: 0px;">
</iframe>


                """
                dest_md = os.path.join(dest_dir, "markdown.md")
                write_file(dest_md, markdown_doc)
                print(f"Generated Markdown doc in: {dest_md}")
get_tag_plan()

Resolve all files associated to tags

Return the list of tags with files

Source code in paasify/stacks.py
def get_tag_plan(self) -> list:
    """
    Resolve all files associated to tags

    Returns a tuple of (tag_list, docker_lookup_dirs, jsonnet_lookup_dirs),
    where each tag_list entry is a dict with keys: tag, jsonnet_file,
    docker_file. The first entry is the tag-less base docker-compose.
    """

    # 0. Init
    # Objects:
    app = self.app or None
    # Vars:
    stack_dir = self.stack_path
    project_jsonnet_dir = self.prj.runtime.project_jsonnet_dir
    docker_candidates = self.docker_candidates()

    # 1. Generate default tag (docker compose files only)
    tag_base = {
        "tag": None,
        "jsonnet_file": None,
        "docker_file": first(docker_candidates),
    }

    # 2. Forward to StackTagManager: Generate directory lookup for tags

    # Prepare paths
    app_jsonnet_dir = None
    dirs_docker = [stack_dir]
    dirs_jsonnet = []

    # Docker files are also searched in the app dir, if any
    if app:
        src = app.get_app_path()
        assert src
        dirs_docker.append(src)
        app_jsonnet_dir = src

    # Get jsonnet search paths: stack, project, app, then sources plugins
    for path in [stack_dir, project_jsonnet_dir, app_jsonnet_dir]:
        if path:
            dirs_jsonnet.append(path)
    for src in self.prj.sources.get_all():
        dirs_jsonnet.append(os.path.join(src.path, ".paasify", "plugins"))

    # Build tag list
    tags = self.tag_manager.get_children()
    tag_list = []
    for tag in tags:

        # Docker lookup
        pattern = [
            f"docker-compose.{tag.name}.yml",
            f"docker-compose.{tag.name}.yaml",
        ]
        lookup = FileLookup()
        self.log.trace(
            f"Looking up {', '.join(pattern)} docker-compose files in: {', '.join(dirs_docker)}"
        )
        for dir_ in dirs_docker:
            lookup.append(dir_, pattern)
        docker_cand = lookup.match()

        docker_file = None
        if len(docker_cand) > 0:
            docker_file = first(docker_cand)["match"]

        # Jsonnet lookup
        pattern = [f"{tag.name}.jsonnet"]
        lookup = FileLookup()
        jsonnet_file = None
        self.log.trace(
            f"Looking up {', '.join(pattern)} jsonnet files in: {', '.join(dirs_jsonnet)}"
        )
        for dir_ in dirs_jsonnet:
            lookup.append(dir_, pattern)
        # BUGFIX: match once after all lookup dirs are registered; the match
        # was previously re-run inside the loop on every append, redundantly
        # re-scanning earlier dirs (same final result, wasted I/O)
        jsonnet_cand = lookup.match()
        if len(jsonnet_cand) > 0:
            jsonnet_file = first(jsonnet_cand)["match"]

        self.log.info(f"Tag '{tag.name}' matches: {docker_file}, {jsonnet_file}")
        ret = {
            "tag": tag,
            "jsonnet_file": jsonnet_file,
            "docker_file": docker_file,
        }
        tag_list.append(ret)

        # Report error to user on missing tags
        if not docker_file and not jsonnet_file:
            msg = f"Can't find '{tag.name}' tag for '{self.stack_name}' stack"
            raise error.MissingTag(msg)

    # 3. Return result list
    results = []
    results.append(tag_base)
    results.extend(tag_list)
    return results, dirs_docker, dirs_jsonnet
log_extra_payloads(payload, msg)

Dump to log large chunks of data

Source code in paasify/stacks.py
def log_extra_payloads(self, payload, msg):
    """Dump large chunks of data to the trace log.

    No-op unless self.dump_data_log is truthy.
    """

    if not self.dump_data_log:
        return

    # BUGFIX: these were plain strings missing the f-prefix, so the literal
    # text "{msg}" and "{pformat(payload)}" was logged instead of the values
    self.log.trace(f"= {msg}")
    self.log.trace("=" * 60)
    self.log.trace(f"{pformat(payload)}")
    self.log.trace("=" * 60)
node_hook_app_load()

Modify stack depending app

Source code in paasify/stacks.py
def node_hook_app_load(self):
    """Derive and register stack identity attributes (name, dirs, paths).

    Called once the app config is loaded; fills in missing name/dir from
    the app or the directory, then registers the resolved paths on self.
    """

    # Assert name,dir,app
    stack_name = self.name
    stack_dir = self.dir
    stack_app = self.app

    # Check name first: when absent, derive it from app or dir
    if not stack_name:

        # Fetch from app
        if stack_app:
            stack_name = stack_app.app_name

        # Fetch from dir: flatten path separators into underscores
        elif stack_dir:
            # stack_name = os.path.split(stack_dir)[-1]
            stack_name = "_".join(stack_dir.split(os.path.sep))

        if not stack_name:
            assert False, f"Missing name, or app or path for stack: {self}"

    # A nameless dir defaults to the stack name
    if not stack_dir:
        stack_dir = stack_name

    if "/" in stack_name:
        # TODO: Workaround, this should be handled before ...
        stack_name = stack_name.replace("/", "-")

    # Register required vars
    self.explain = False
    self.stack_name = stack_name
    self.stack_dir = stack_dir
    # Project namespace: stack config wins over runtime default
    self.prj_ns = self.prj.config.namespace or self.prj.runtime.namespace
    self.prj_path = self.prj.runtime.root_path
    self.stack_path = os.path.join(self.prj.runtime.root_path, stack_dir)
    # Per-stack dump dir used by StackDumper during assemble --explain
    self.stack_dump_path = os.path.join(
        self.runtime.project_private_dir, "_dumps", stack_dir
    )
    self.ident = self.stack_name
    self.stack_path_abs = os.path.abspath(self.stack_path)

    # Check: name/dir must start with an allowed character
    # NOTE(review): the trailing ".*$" makes these regexes only constrain
    # the first character — confirm whether a stricter pattern was intended
    assert self.stack_name, f"Bug here, should not be empty, got: {self.stack_name}"
    assert re.search("^[a-zA-Z0-9_].*$", self.stack_name), f"Got: {self.stack_name}"
    assert re.search("^[a-zA-Z0-9_/].*$", self.stack_dir), f"Got: {self.stack_dir}"
node_hook_final()

Enable CLI debugging

Source code in paasify/stacks.py
def node_hook_final(self):
    """Finalize stack setup: engine instance, tag manager and CLI defaults."""

    # Create engine instance, bound to the generated docker-compose.run.yml
    payload = {
        "stack_name": f"{self.prj_ns}_{self.stack_name}",
        "stack_path": self.stack_path,
        # os.path.join(self.stack_dir, "docker-compose.run.yml"),
        "docker_file": "docker-compose.run.yml",
    }
    self.engine = self.prj.engine_cls(parent=self, payload=payload)

    # Prepare stack lists: stack-level settings fall back to project config
    tag_config = {
        "raw": self.tags or self.prj.config.tags,
        # "_paasify" is always the first (hidden) prefix tag
        "tag_prefix": ["_paasify"]
        + (self.tags_prefix or self.prj.config.tags_prefix),
        "tag_suffix": self.tags_suffix or self.prj.config.tags_suffix,
    }

    # Start managers; var_manager is created later, during assemble()
    self.tag_manager = StackTagMgr(
        parent=self, ident=self.stack_name, payload=tag_config
    )
    self.var_manager = None

    self.log.info(f"Stack config: {self.stack_name} in {self.stack_path}")

    # Set sh default args: foreground by default, no stdin without a TTY
    self.default_sh_args = {"_fg": True}
    if self.prj.runtime.no_tty:
        self.default_sh_args = {"_in": False}
node_hook_init()

Create instance attributes

Source code in paasify/stacks.py
def node_hook_init(self):
    "Create instance attributes"

    # Per-instance cache (used by docker_candidates and friends)
    self._cache = {}

    # Resolve the owning project (grand-parent node) and its runtime
    self.prj = self.get_parent().get_parent()
    self.runtime = self.prj.runtime
    assert (
        self.prj.__class__.__name__ == "PaasifyProject"
    ), f"Expected PaasifyProject, got: {self.prj}"
render_vars(hint=None, parse=True, skip_undefined=False, scope=None, parse_vars=None, varmgr=None, explain=None)

Return parsed vars for a given scope

Source code in paasify/stacks.py
def render_vars(
    self,
    hint=None,
    parse=True,
    skip_undefined=False,
    scope=None,
    parse_vars=None,
    varmgr=None,
    explain=None,
):
    """Return parsed vars for a given scope.

    hint: mandatory human-readable label used in logs/errors
    parse: whether to parse/interpolate the variables
    skip_undefined: forwarded to the var manager's render_env
    scope: comma-separated string, list of scope names, or a callable
        predicate taking a var and returning bool
    parse_vars: already-parsed vars used as parsing context
    varmgr: var manager override; defaults to self.var_manager
    explain: dump current vars on undefined-variable errors

    Raises error.UndeclaredVariable when parsing hits an unknown variable.
    """

    assert hint, f"Missing valid hint for render_vars call for {self} ..."

    # Detect selector
    parse_vars = parse_vars or {}
    explain = explain if isinstance(explain, bool) else self.explain
    func = None
    scopes = None
    varmgr = varmgr or self.var_manager
    assert varmgr, "Var manager is not initialized or provided !"
    # Normalize string/list scopes into a list of scope names
    if isinstance(scope, str):
        scopes = scope.split(",")
    elif isinstance(scope, list):
        scopes = scope

    # Attribute function: membership test over the normalized scope names
    def _func(var):
        return var.scope in scopes

    # Callable scopes are used as-is; otherwise use the membership test
    func = _func if scopes else scope

    # Parse the result
    msg = f"Environment rendering asked for scope: parse={parse}, hint={hint}"
    self.log.trace(msg)
    try:
        result = varmgr.render_env(
            parse=parse,
            parse_vars=parse_vars,
            skip_undefined=skip_undefined,
            select=func,
            hint=hint,
        )
    except error.UndeclaredVariable as err:
        # Help the user debug undeclared variables before re-raising
        if explain is True:
            self.log.notice("Explain current vars")
            self.var_manager.explain()
        else:
            self.log.notice("Use --explain flag to get current vars")
        raise err

    return result

StackManager

Bases: NodeList, PaasifyObj

Manage a list of stacks

Source code in paasify/stacks.py
class StackManager(NodeList, PaasifyObj):
    """Manage a list of stacks.

    Holds the project's Stack children, validates name/dir uniqueness and
    exposes the stack-level commands (assemble, up, down, ps, vars, ...).
    """

    conf_schema = {
        # "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "Paasify Stack configuration",
        "description": "Stacks are defined in a list of objects",
        "type": "array",
        "default": [],
        "items": Stack.conf_schema,
    }

    conf_children = Stack
    ident = "main"

    def node_hook_final(self):
        "Enable CLI logging and validate config"

        # Safety checks: stack names and dirs must be unique project-wide
        dup_names = []
        dup_dirs = []
        curr_index = -1
        for stack in self.get_children():
            curr_index += 1
            stack_name = stack.stack_name

            # Check for duplicates names
            if stack_name in dup_names:
                index = dup_names.index(stack_name)
                raise error.ProjectInvalidConfig(
                    f"Cannot have duplicate stack names, stacks {index} and {curr_index} share the same name: '{stack_name}'"
                )
            dup_names.append(stack_name)

            # Check for duplicates dirs
            stack_dir = stack.stack_dir
            if stack_dir in dup_dirs:
                index = dup_dirs.index(stack_dir)
                raise error.ProjectInvalidConfig(
                    f"Cannot have duplicate stack dir, stacks {index} and {curr_index} share the same dir: '{stack_dir}'"
                )
            dup_dirs.append(stack_dir)

        # Notice user because this is weird
        children = self.get_children()
        if not children:
            self.log.warning("No stacks found for this project!")

    # Stack management API
    # ======================

    def list_stacks(self):
        "Get stacks children (deprecated)"
        return self.get_children()

    def get_stacks_attr_ident(self):
        "List stack per idents"
        return [x.ident for x in self.get_children()]

    def get_stacks_attr(self, attr="ident"):
        "List stacks by attribute"
        return [getattr(x, attr) for x in self.get_children()]

    def get_stacks_obj(self, attr=None, values=None):
        """
        Get stack instance matching in values

        If attr or value is None, return all instances
        Values must be an array of valid values.
        """

        # Default selection: match the first path component of the sub dir
        sub_dir = self._node_parent.runtime.sub_dir
        if values is None and sub_dir:
            # attr = path
            values = sub_dir.split(os.path.sep)[0]
            print(f"Automatch subdir: {values}")

        if isinstance(attr, str) and values is not None:
            if not isinstance(values, list):
                values = [values]
            result = [
                stack for stack in self.get_children() if getattr(stack, attr) in values
            ]
            return result

        return self.get_children()

    # Command Base API
    # ======================

    @stack_target
    def cmd_stack_assemble(self, stacks=None, vars_only=False, explain=False):
        "Assemble a stack"

        # BUGFIX: typo in log message ("Asemble" -> "Assemble")
        self.log.info("Assemble stacks:")
        for stack in stacks:
            self.log.notice(f"Assemble stack: {stack.stack_name}")
            stack.assemble(vars_only=vars_only, explain=explain)

    @stack_target
    def cmd_stack_up(self, stacks=None):
        "Start a stack"

        self.log.info("Start stacks:")
        for stack in stacks:
            self.log.notice(f"  Start stack: {stack.stack_name}")
            stack.engine.up(**stack.default_sh_args)

    @stack_target
    def cmd_stack_down(self, stacks=None, ignore_errors=False):
        "Stop a stack"

        # Stop in reverse declaration order
        stacks = list(stacks)
        stacks.reverse()
        self.log.info("Stop stacks:")
        for stack in stacks:
            self.log.notice(f"  Stop stack: {stack.stack_name}")
            try:
                stack.engine.down(**stack.default_sh_args)
            except error.DockerCommandFailed:
                if not ignore_errors:
                    raise
                self.log.debug(
                    f"Ignoring stop failure in case of recreate for stack: {stack.stack_name}"
                )

    @stack_target
    def cmd_stack_ps(self, stacks=None):
        "List stacks process"

        if len(stacks) < 1:
            self.log.notice("  No process founds")
            return

        # self.log.notice("List of processes:")
        for stack in stacks:
            # self.log.notice(f"Process of stack: {stack.stack_name}")
            stack.engine.ps()

    # Shortcuts
    # ======================
    @stack_target
    def cmd_stack_apply(self, stacks=None):
        "Apply a stack"

        self.log.notice("Apply stacks")
        self.cmd_stack_assemble(stacks=stacks)
        self.cmd_stack_up(stacks=stacks)
        self.log.notice("Stack has been applied")

    @stack_target
    def cmd_stack_recreate(self, stacks=None):
        "Recreate a stack"

        self.log.notice("Recreate stacks")
        # Best-effort stop first: failures are ignored on recreate
        self.cmd_stack_down(stacks=stacks, ignore_errors=True)
        self.cmd_stack_assemble(stacks=stacks)
        self.cmd_stack_up(stacks=stacks)
        self.log.notice("Stack has been recreated")

    # Other commands
    # ======================

    def cmd_stack_ls(self, stacks=None):
        "List command to stacks"

        for stack in self.get_children():
            stack_app = stack.app.app if stack.app else None
            print(f"  - {stack.stack_name}:")
            print(f"      app: {stack_app}")
            print(f"      path: {stack.stack_path}")

    @stack_target
    def cmd_stack_vars(self, stacks=None, vars_=None, explain=False):
        "Show vars of stack"

        if isinstance(vars_, list):
            self.log.info(f"Restrict output to variables: {','.join(vars_)}")

        for stack in stacks:
            self.log.notice(f"Get stack vars: {stack.stack_name}")
            ret = stack.assemble(vars_only=vars_ or True, explain=explain)
            # TODO: PAtch cafram to support ordered yaml output !
            # See: https://github.com/barbu-it/cafram/blob/main/cafram/utils.py#L261
            # See: https://stackoverflow.com/questions/40226610/ruamel-yaml-equivalent-of-sort-keys
            if ret:
                pprint(ret)
            # print (to_yaml(ret))

    @stack_target
    def cmd_stack_explain(self, stacks=None):
        "Show informations on project plugins"

        for stack in stacks:
            stack.explain_tags()

        # if isinstance(mode, str):
        #     dst_path = mode
        #     self.log.notice("Generate documentation in dir:", dst_path)
        #     for stack in self.get_children():
        #         stack.gen_doc(output_dir=dst_path)

    @stack_target
    def cmd_stack_logs(self, stacks=None, follow=False):
        "Display stack/services logs"

        # BUGFIX: typo in warning message ("tha" -> "than")
        if follow and len(stacks) > 1:
            self.log.warning(
                "Disabling log following as it's not possible on more than one stack"
            )
            follow = None

        for stack in stacks:
            self.log.notice(f"Logs of stack: {stack.stack_name}")
            stack.engine.logs(follow)
cmd_stack_apply(stacks=None)

Apply a stack

Source code in paasify/stacks.py
@stack_target
def cmd_stack_apply(self, stacks=None):
    "Apply a stack"

    # Applying means: assemble the compose configuration, then start it
    targets = stacks
    self.log.notice("Apply stacks")
    self.cmd_stack_assemble(stacks=targets)
    self.cmd_stack_up(stacks=targets)
    self.log.notice("Stack has been applied")
cmd_stack_assemble(stacks=None, vars_only=False, explain=False)

Assemble a stack

Source code in paasify/stacks.py
@stack_target
def cmd_stack_assemble(self, stacks=None, vars_only=False, explain=False):
    "Assemble a stack"

    # Build each stack's final configuration output.
    # Fix: corrected "Asemble stacks:" typo in the log message.
    self.log.info("Assemble stacks:")
    for stack in stacks:
        self.log.notice(f"Assemble stack: {stack.stack_name}")
        stack.assemble(vars_only=vars_only, explain=explain)
cmd_stack_down(stacks=None, ignore_errors=False)

Stop a stack

Source code in paasify/stacks.py
@stack_target
def cmd_stack_down(self, stacks=None, ignore_errors=False):
    "Stop a stack"

    # Stop in reverse declaration order so later stacks go down first
    self.log.info("Stop stacks:")
    for stack in reversed(list(stacks)):
        self.log.notice(f"  Stop stack: {stack.stack_name}")
        try:
            stack.engine.down(**stack.default_sh_args)
        except error.DockerCommandFailed:
            if not ignore_errors:
                raise
            # Best-effort mode: a failed stop is only logged, not raised
            self.log.debug(
                f"Ignoring stop failure in case of recreate for stack: {stack.stack_name}"
            )
cmd_stack_explain(stacks=None)

Show information on project plugins

Source code in paasify/stacks.py
@stack_target
def cmd_stack_explain(self, stacks=None):
    "Show informations on project plugins"

    for stack in stacks:
        stack.explain_tags()
cmd_stack_logs(stacks=None, follow=False)

Display stack/services logs

Source code in paasify/stacks.py
@stack_target
def cmd_stack_logs(self, stacks=None, follow=False):
    "Display stack/services logs"

    if follow and len(stacks) > 1:
        self.log.warning(
            "Disabling log following as it's not possible on more tha one stack"
        )
        follow = None

    for stack in stacks:
        self.log.notice(f"Logs of stack: {stack.stack_name}")
        stack.engine.logs(follow)
cmd_stack_ls(stacks=None)

List command to stacks

Source code in paasify/stacks.py
def cmd_stack_ls(self, stacks=None):
    "List command to stacks"

    for stack in self.get_children():
        stack_app = stack.app.app if stack.app else None
        print(f"  - {stack.stack_name}:")
        print(f"      app: {stack_app}")
        print(f"      path: {stack.stack_path}")
cmd_stack_ps(stacks=None)

List stacks process

Source code in paasify/stacks.py
@stack_target
def cmd_stack_ps(self, stacks=None):
    "List stacks process"

    # Nothing to display when no stack matched the request
    if not stacks:
        self.log.notice("  No process founds")
        return

    for target in stacks:
        target.engine.ps()
cmd_stack_recreate(stacks=None)

Recreate a stack

Source code in paasify/stacks.py
@stack_target
def cmd_stack_recreate(self, stacks=None):
    "Recreate a stack"

    # Recreate = stop (best effort), rebuild the config, then start again
    targets = stacks
    self.log.notice("Recreate stacks")
    self.cmd_stack_down(stacks=targets, ignore_errors=True)
    self.cmd_stack_assemble(stacks=targets)
    self.cmd_stack_up(stacks=targets)
    self.log.notice("Stack has been recreated")
cmd_stack_up(stacks=None)

Start a stack

Source code in paasify/stacks.py
@stack_target
def cmd_stack_up(self, stacks=None):
    "Start a stack"

    # Bring each targeted stack up through its engine
    self.log.info("Start stacks:")
    for target in stacks:
        self.log.notice(f"  Start stack: {target.stack_name}")
        target.engine.up(**target.default_sh_args)
cmd_stack_vars(stacks=None, vars_=None, explain=False)

Show vars of stack

Source code in paasify/stacks.py
@stack_target
def cmd_stack_vars(self, stacks=None, vars_=None, explain=False):
    "Show vars of stack"

    if isinstance(vars_, list):
        self.log.info(f"Restrict output to variables: {','.join(vars_)}")

    for stack in stacks:
        self.log.notice(f"Get stack vars: {stack.stack_name}")
        ret = stack.assemble(vars_only=vars_ or True, explain=explain)
        # TODO: PAtch cafram to support ordered yaml output !
        # See: https://github.com/barbu-it/cafram/blob/main/cafram/utils.py#L261
        # See: https://stackoverflow.com/questions/40226610/ruamel-yaml-equivalent-of-sort-keys
        if ret:
            pprint(ret)
get_stacks_attr(attr='ident')

List stacks by attribute

Source code in paasify/stacks.py
def get_stacks_attr(self, attr="ident"):
    "List stacks by attribute"
    # Collect the requested attribute from every child stack
    children = self.get_children()
    return [getattr(child, attr) for child in children]
get_stacks_attr_ident()

List stack per idents

Source code in paasify/stacks.py
def get_stacks_attr_ident(self):
    "List stack per idents"
    # Shortcut for the 'ident' attribute of every child stack
    return [child.ident for child in self.get_children()]
get_stacks_obj(attr=None, values=None)

Get stack instance matching in values

If attr or values is None, return all instances. Values must be an array of valid values.

Source code in paasify/stacks.py
def get_stacks_obj(self, attr=None, values=None):
    """
    Get stack instance matching in values

    If attr or values is None, return all instances.
    Values must be an array of valid values.
    """

    # When run from a project sub-directory, default the match value to the
    # first path component of that sub-directory
    sub_dir = self._node_parent.runtime.sub_dir
    if values is None and sub_dir:
        # attr = path
        # NOTE(review): attr is left unchanged here, so this auto-matched
        # value only takes effect when the caller also passed attr — confirm
        values = sub_dir.split(os.path.sep)[0]
        print(f"Automatch subdir: {values}")

    # Filter children on the requested attribute; a scalar value is wrapped
    # into a one-element list first
    if isinstance(attr, str) and values is not None:
        if not isinstance(values, list):
            values = [values]
        result = [
            stack for stack in self.get_children() if getattr(stack, attr) in values
        ]
        return result

    # No filtering requested: return every stack
    return self.get_children()
list_stacks()

Get stacks children (deprecated)

Source code in paasify/stacks.py
def list_stacks(self):
    "Get stacks children (deprecated)"
    # Deprecated alias kept for backward compatibility
    children = self.get_children()
    return children
node_hook_final()

Enable CLI logging and validate config

Source code in paasify/stacks.py
def node_hook_final(self):
    "Enable CLI logging and validate config"

    # Safety checks: both stack names and stack dirs act as unique
    # identifiers later on, so duplicates are hard configuration errors.
    dup_names = []
    dup_dirs = []
    curr_index = -1
    for stack in self.get_children():
        curr_index += 1
        stack_name = stack.stack_name

        # Check for duplicates names
        if stack_name in dup_names:
            index = dup_names.index(stack_name)
            raise error.ProjectInvalidConfig(
                f"Cannot have duplicate stack names, stacks {index} and {curr_index} share the same name: '{stack_name}'"
            )
        dup_names.append(stack_name)

        # Check for duplicates dirs
        stack_dir = stack.stack_dir
        if stack_dir in dup_dirs:
            index = dup_dirs.index(stack_dir)
            raise error.ProjectInvalidConfig(
                f"Cannot have duplicate stack dir, stacks {index} and {curr_index} share the same dir: '{stack_dir}'"
            )
        dup_dirs.append(stack_dir)

    # Notice user because this is weird: a project without stacks is
    # valid but almost certainly a configuration mistake
    children = self.get_children()
    if not children:
        self.log.warning("No stacks found for this project!")

stack_target(fn)

Decorator to magically find the correct stack to apply

Source code in paasify/stacks.py
def stack_target(fn):
    "Decorator to magically find the correct stack to apply"

    @wraps(fn)
    def wrapper(self, *args, stacks=None, stack_names=None, **kwargs):
        "Resolve the target stacks from names, current directory or defaults"

        # Intelligently guess which stacks to use when none are given
        if not stacks:
            stacks = []

            # Accept a comma-separated string of stack names
            if isinstance(stack_names, str):
                stack_names = [name.strip("/") for name in stack_names.split(",")]

            sub_dir = self.get_parent().runtime.sub_dir
            if not stack_names:

                if sub_dir:

                    # Use current dir stacks if in subdir
                    stack_path = sub_dir.split(os.path.sep)[0]
                    stacks = [
                        stack
                        for stack in self.get_children()
                        if stack_path == stack.stack_dir
                    ]
                    if stacks:
                        self.log.debug(
                            f"Use stack {stack_path} from current working directory"
                        )

                else:
                    # Nothing specified at all: target every stack
                    self.log.debug("Use all stacks")
                    stacks = self.get_children()

            else:
                # Loop over the explicitly requested stacks; match either
                # on stack_name or stack_path
                assert isinstance(stack_names, list), f"Got: {stack_names}"
                stack_names = [name for name in stack_names if name]
                stacks = [
                    stack
                    for stack in self.get_children()
                    if stack.stack_name in stack_names
                    or stack.stack_path in stack_names
                ]
                self.log.debug(
                    f"Requested stacks: {stack_names}, matched stacks: {stacks}"
                )

                # Assert all requested stacks have been matched
                # NOTE(review): remove() assumes each stack was requested by
                # its name; a stack matched only via stack_path would raise
                # ValueError here — confirm
                if len(stack_names) != len(stacks):
                    missing_stacks = list(stack_names)
                    for stack in stacks:
                        missing_stacks.remove(stack.stack_name)
                    raise error.StackNotFound(
                        f"This stack is not defined in config: {','.join(missing_stacks)}"
                    )

        # Clean decorator argument
        # NOTE(review): this checks 'stacks_names' while the wrapper accepts
        # 'stack_names' — looks like a typo'd key; confirm which is intended
        if "stacks_names" in kwargs:
            del kwargs["stacks_names"]

        # Last sanity tests before delegating to the wrapped command
        if len(self.get_children()) < 1:
            raise error.StackNotFound(
                "There are no stacks configured yet for this project, please edit your paasify.yml config"
            )
        if len(stacks) < 1:
            raise error.StackNotFound(
                f"This stack is not defined in config: {stack_names}"
            )
        assert isinstance(stacks, list), f"Got: {stacks}"

        # Last user report
        stacks_names = ",".join([stack.stack_name for stack in stacks])
        self.log.info(f"Running command '{fn.__name__}' on stacks: {stacks_names}")
        return fn(self, *args, stacks=stacks, **kwargs)

    return wrapper

Paasify Stack Components

paasify.stack_components

Stack components class

StackApp

Bases: NodeMap, PaasifyObj

Stack Application Object

Source code in paasify/stack_components.py
class StackApp(NodeMap, PaasifyObj):
    "Stack Application Object"

    # conf_logger = "paasify.cli.stack_app"

    # All four fields are resolved/normalized in node_hook_transform
    conf_default = {
        "app": None,
        "app_source": None,
        "app_path": None,
        "app_name": None,
    }

    @property
    def name(self):
        "App name attribute"
        return self.app_name

    # def node_hook_init(self, **kwargs):
    def node_hook_init(self):
        "Store shortcuts to the parent stack, project and its sources"

        # Future: let's propagate from the top instead ...
        # parents = self.get_parents()
        # stack = parents[1]
        # prj = parents[3]
        self.stack = self._node_parent
        self.prj = self.stack._node_parent._node_parent
        self.sources = self.prj.sources

        self.app_dir = None

    def node_hook_transform(self, payload):
        "Normalize payload into a dict of app, app_path, app_source, app_name"

        # A bare string is shorthand for {"app": "<string>"}
        if isinstance(payload, str):
            payload = {"app": payload}

        app_def = payload.get("app")
        app_path = payload.get("app_path")
        app_source = payload.get("app_source")
        app_name = payload.get("app_name")

        # Split "<source>:<path>" references; with maxsplit=2 a reference
        # containing two colons yields 3 parts and falls into the else branch
        app_split = app_def.split(":", 2)

        if len(app_split) == 2:
            app_source = app_source or app_split[0] or "default"
            app_path = app_path or app_split[1]
        else:
            # Get from default namespace
            # NOTE(review): app_name is derived from app_source here, which
            # looks inverted compared to the branch above — confirm intended
            app_name = app_source or app_split[0] or "default"
            app_source = "default"
            app_path = app_name
        app_def = f"{app_source}:{app_path}"

        # Fall back to the last path component as the app name
        if not app_name:
            app_name = os.path.split(app_path)[-1]

        result = {
            "app": app_def,
            "app_path": app_path,
            "app_source": app_source,
            "app_name": app_name,
        }

        return result

    def get_app_source(self):
        "Return app source"

        # Resolve the source through the project's sources registry
        app_source = self._node_conf_parsed["app_source"] or None
        target = self._node_conf_parsed["app_path"] or self._node_conf_parsed["app_name"]
        src = self.prj.sources.get_app_source(target, source=app_source)
        return src

    def get_app_path(self):
        "Return app path"

        # Join the resolved source path with the app's relative target
        src = self.get_app_source()
        target = self._node_conf_parsed["app_path"] or self._node_conf_parsed["app_name"]
        ret = os.path.join(src.path, target)
        return ret
name property

App name attribute

get_app_path()

Return app path

Source code in paasify/stack_components.py
def get_app_path(self):
    "Return app path"

    src = self.get_app_source()
    target = self._node_conf_parsed["app_path"] or self._node_conf_parsed["app_name"]
    ret = os.path.join(src.path, target)
    return ret
get_app_source()

Return app source

Source code in paasify/stack_components.py
def get_app_source(self):
    "Return app source"

    app_source = self._node_conf_parsed["app_source"] or None
    target = self._node_conf_parsed["app_path"] or self._node_conf_parsed["app_name"]
    src = self.prj.sources.get_app_source(target, source=app_source)
    return src

StackAssembler

Bases: PaasifyObj

Object to manage stack assemblage

Source code in paasify/stack_components.py
class StackAssembler(PaasifyObj):
    "Object to manage stack assemblage"

    # Internal object:
    # all_tags
    # engine

    # Docker processors
    # ===========================
    def _get_docker_files(self, all_tags):
        "Retrieve the list of tags docker-files"

        # TODO: Deprecated this, this has already been done before somewhere in the process !

        # Keep only tags that actually carry a docker_file entry
        docker_files = []
        for cand in all_tags:
            docker_file = cand.get("docker_file")
            if docker_file:
                docker_files.append(docker_file)
                self.log.info(f"Insert docker-compose: {docker_file}")

        return docker_files

    def assemble_docker_compose(
        self, all_tags, engine, env=None, dump_payload_log=False
    ):
        "Generate the docker-compose file"

        docker_files = self._get_docker_files(all_tags)

        # Report to user
        env = env or {}
        assert isinstance(env, dict), f"Got: {env}"

        if dump_payload_log:
            self.log.trace("Docker vars:")
            for key, val in sorted(env.items()):
                self.log.trace(f"  {key}: {val}")

        out = engine.assemble(docker_files, env=env)
        # Exception is too wide !
        # try:
        #    out = engine.assemble(docker_files, env=env)
        # except Exception as err:
        # # TOTEST => except sh.ErrorReturnCode as err:
        #    err = bin2utf8(err)
        #    # pylint: disable=no-member
        #    self.log.critical(err.txterr)
        #    raise error.DockerBuildConfig(
        #        f"Impossible to build docker-compose files: {err}"
        #    ) from err

        # Fetch output: parse the engine's stdout as YAML into a python object
        docker_run_content = out.stdout.decode("utf-8")
        docker_run_payload = anyconfig.loads(docker_run_content, ac_parser="yaml")
        return docker_run_payload

    # Vars processors
    # ===========================

    def process_jsonnet_exec(self, file, action, data, import_dirs=None):
        "Process jsonnet file"

        # Developper init
        import_dirs = import_dirs or []
        data = data or {}
        assert isinstance(data, dict), f"Data must be dict, got: {data}"
        # assert len(import_dirs) > 2, f"Missing import dirs, got: {import_dirs}"

        # TODO: Enforce jsonnet API
        # assert action in [
        #     "metadata",
        #     "vars_default",
        #     "vars_override",
        #     "process_globals", # Testing WIP
        #     "process_transform", # Testing WIPP
        #     "docker_override",
        # ], f"Action not supported: {action}"

        # Prepare input variables: everything passed to jsonnet must be
        # serialized to JSON strings (ext_vars protocol)
        mod_ident = os.path.splitext(os.path.basename(file))[0]
        ext_vars = {
            "parent": json.dumps(mod_ident),
            "action": json.dumps(action),
        }
        for key, val in data.items():
            ext_vars[key] = json.dumps(val)

        def try_path(dir_, rel):
            "Helper function to load a jsonnet file into memory for _jsonnet"

            if not rel:
                return None, None
                # raise RuntimeError("Got invalid filename (empty string).")

            # Absolute paths are used as-is, relative ones joined to dir_
            if rel[0] == "/":
                full_path = rel
            else:
                full_path = os.path.join(dir_, rel)

            if full_path[-1] == "/":
                return None, None
                # raise RuntimeError("Attempted to import a directory")

            # (full_path, None) signals "candidate path exists but no content"
            if not os.path.isfile(full_path):
                return full_path, None
            with open(
                full_path,
                encoding="utf-8",
            ) as file_:
                return full_path, file_.read()

        # Jsonnet import callback
        def import_callback(dir_, rel):
            "Helper function to load a jsonnet libraries in lookup paths"

            # Search the importing file's dir first, then the extra import dirs
            test_dirs = [dir_] + import_dirs
            for test_dir in test_dirs:
                full_path, content = try_path(test_dir, rel)
                self.log.trace(f"Load '{rel}' jsonnet from: {full_path}")
                if content:
                    return full_path, content.encode()

            test_dirs = " ".join(test_dirs)
            raise RuntimeError(
                f"Jsonnet file not found '{rel}' in any of these paths: {test_dirs}"
            )

        # Process jsonnet tag
        self.log.trace(f"Process jsonnet: {file} (action={action})")
        try:
            # pylint: disable=c-extension-no-member
            result = _jsonnet.evaluate_file(
                file,
                ext_vars=ext_vars,
                import_callback=import_callback,
            )
        except RuntimeError as err:
            # NOTE(review): consider `raise ... from err` to keep the cause chain
            self.log.critical(f"Can't parse jsonnet file: {file}")
            raise error.JsonnetBuildFailed(err)

        # Return python object from json output
        result = json.loads(result)
        return result
assemble_docker_compose(all_tags, engine, env=None, dump_payload_log=False)

Generate the docker-compose file

Source code in paasify/stack_components.py
def assemble_docker_compose(
    self, all_tags, engine, env=None, dump_payload_log=False
):
    "Generate the docker-compose file"

    docker_files = self._get_docker_files(all_tags)

    # Report to user
    env = env or {}
    assert isinstance(env, dict), f"Got: {env}"

    if dump_payload_log:
        self.log.trace("Docker vars:")
        for key, val in sorted(env.items()):
            self.log.trace(f"  {key}: {val}")

    out = engine.assemble(docker_files, env=env)
    # Exception is too wide !
    # try:
    #    out = engine.assemble(docker_files, env=env)
    # except Exception as err:
    # # TOTEST => except sh.ErrorReturnCode as err:
    #    err = bin2utf8(err)
    #    # pylint: disable=no-member
    #    self.log.critical(err.txterr)
    #    raise error.DockerBuildConfig(
    #        f"Impossible to build docker-compose files: {err}"
    #    ) from err

    # Fetch output
    docker_run_content = out.stdout.decode("utf-8")
    docker_run_payload = anyconfig.loads(docker_run_content, ac_parser="yaml")
    return docker_run_payload
process_jsonnet_exec(file, action, data, import_dirs=None)

Process jsonnet file

Source code in paasify/stack_components.py
def process_jsonnet_exec(self, file, action, data, import_dirs=None):
    "Process jsonnet file"

    # Developper init
    import_dirs = import_dirs or []
    data = data or {}
    assert isinstance(data, dict), f"Data must be dict, got: {data}"
    # assert len(import_dirs) > 2, f"Missing import dirs, got: {import_dirs}"

    # TODO: Enforce jsonnet API
    # assert action in [
    #     "metadata",
    #     "vars_default",
    #     "vars_override",
    #     "process_globals", # Testing WIP
    #     "process_transform", # Testing WIPP
    #     "docker_override",
    # ], f"Action not supported: {action}"

    # Prepare input variables
    mod_ident = os.path.splitext(os.path.basename(file))[0]
    ext_vars = {
        "parent": json.dumps(mod_ident),
        "action": json.dumps(action),
    }
    for key, val in data.items():
        ext_vars[key] = json.dumps(val)

    def try_path(dir_, rel):
        "Helper function to load a jsonnet file into memory for _jsonnet"

        if not rel:
            return None, None
            # raise RuntimeError("Got invalid filename (empty string).")

        if rel[0] == "/":
            full_path = rel
        else:
            full_path = os.path.join(dir_, rel)

        if full_path[-1] == "/":
            return None, None
            # raise RuntimeError("Attempted to import a directory")

        if not os.path.isfile(full_path):
            return full_path, None
        with open(
            full_path,
            encoding="utf-8",
        ) as file_:
            return full_path, file_.read()

    # Jsonnet import callback
    def import_callback(dir_, rel):
        "Helper function to load a jsonnet libraries in lookup paths"

        test_dirs = [dir_] + import_dirs
        for test_dir in test_dirs:
            full_path, content = try_path(test_dir, rel)
            self.log.trace(f"Load '{rel}' jsonnet from: {full_path}")
            if content:
                return full_path, content.encode()

        test_dirs = " ".join(test_dirs)
        raise RuntimeError(
            f"Jsonnet file not found '{rel}' in any of these paths: {test_dirs}"
        )

    # Process jsonnet tag
    self.log.trace(f"Process jsonnet: {file} (action={action})")
    try:
        # pylint: disable=c-extension-no-member
        result = _jsonnet.evaluate_file(
            file,
            ext_vars=ext_vars,
            import_callback=import_callback,
        )
    except RuntimeError as err:
        self.log.critical(f"Can't parse jsonnet file: {file}")
        raise error.JsonnetBuildFailed(err)

    # Return python object from json output
    result = json.loads(result)
    return result

StackDumper

Bases: PaasifyObj

StackDumper for dumping data into files for troubleshooting purpose

Source code in paasify/stack_components.py
class StackDumper(PaasifyObj):
    "StackDumper for dumping data into files for troubleshooting purpose"

    def __init__(self, path, enabled=True):
        "Create a dumper writing into `path`; wipe the directory when enabled"

        self.out_dir = path
        self.enabled = enabled

        if self.enabled:
            self.cleanup()

    def cleanup(self):
        "Cleanup target directory"
        path = self.out_dir

        # Create the directory on first use, then empty its content
        if not os.path.exists(path):
            self.log.info(f"StackDumper created directory: {path}")
            os.makedirs(path)
        for file_ in os.listdir(path):
            rm_file = os.path.join(path, file_)
            self.log.notice(f"StackDumper removed file: {rm_file}")
            os.remove(rm_file)

    def dump(self, file_name, content, fmt=None):
        "Dump any data"

        # No-op when dumping is disabled
        if not self.enabled:
            return

        dest = os.path.join(self.out_dir, file_name)

        # Serialize according to the requested format; anything left
        # non-string is stringified below
        if fmt == "json":
            content = to_json(content)
        elif fmt in ["yml", "yaml"]:
            content = to_yaml(content)
        elif fmt in ["pprint", "pformat"]:
            content = pformat(content) + "\n"

        if not isinstance(content, str):
            content = str(content)

        self.log.info(f"Dumping data into: {dest}")
        write_file(dest, content)

    def show_diff(self):
        "Show a colored diff output between files"

        if not self.enabled:
            return

        # Print the base compose file, then diff each intermediate
        # "2-*out.yml" jsonnet output against the previous one
        print("==== Dump differential jsonnet output")
        path = os.path.realpath(self.out_dir)
        prev = os.path.join(path, "1-docker-compose.yml")
        print(_exec("tail", cli_args=["-n", "9999", prev]))
        for file_ in sorted(os.listdir(path)):
            if file_.startswith("2-") and file_.endswith("out.yml"):
                file_ = os.path.join(path, file_)
                opts = [
                    "--color=always",
                    "-u",
                    prev,
                    file_,
                ]
                # diff exits 1 when files differ; accept both 0 and 1
                out = _exec("diff", cli_args=opts, _ok_code=[0, 1])
                print(out)
                prev = file_
cleanup()

Cleanup target directory

Source code in paasify/stack_components.py
def cleanup(self):
    "Cleanup target directory"
    path = self.out_dir

    if not os.path.exists(path):
        self.log.info(f"StackDumper created directory: {path}")
        os.makedirs(path)
    for file_ in os.listdir(path):
        rm_file = os.path.join(path, file_)
        self.log.notice(f"StackDumper removed file: {rm_file}")
        os.remove(rm_file)
dump(file_name, content, fmt=None)

Dump any data

Source code in paasify/stack_components.py
def dump(self, file_name, content, fmt=None):
    "Dump any data"

    if not self.enabled:
        return

    dest = os.path.join(self.out_dir, file_name)

    if fmt == "json":
        content = to_json(content)
    elif fmt in ["yml", "yaml"]:
        content = to_yaml(content)
    elif fmt in ["pprint", "pformat"]:
        content = pformat(content) + "\n"

    if not isinstance(content, str):
        content = str(content)

    self.log.info(f"Dumping data into: {dest}")
    write_file(dest, content)
show_diff()

Show a colored diff output between files

Source code in paasify/stack_components.py
def show_diff(self):
    "Show a colored diff outpout between files"

    if not self.enabled:
        return

    print("==== Dump differential jsonnet output")
    path = os.path.realpath(self.out_dir)
    prev = os.path.join(path, "1-docker-compose.yml")
    print(_exec("tail", cli_args=["-n", "9999", prev]))
    for file_ in sorted(os.listdir(path)):
        if file_.startswith("2-") and file_.endswith("out.yml"):
            file_ = os.path.join(path, file_)
            opts = [
                "--color=always",
                "-u",
                prev,
                file_,
            ]
            out = _exec("diff", cli_args=opts, _ok_code=[0, 1])
            print(out)
            prev = file_

StackTag

Bases: NodeMap, PaasifyObj

Paasify Stack object

Source code in paasify/stack_components.py
class StackTag(NodeMap, PaasifyObj):
    """Paasify Stack object"""

    # Schema accepts a tag either as a plain string or as a single-key
    # mapping of tag name to jsonnet override vars
    conf_schema = {
        # "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "StackTag configuration",
        "description": (
            "Tag definition. It support two formats at the same time: as string or dict."
            " If the name is prefixed with a `!`, then it is removed from the"
            " processing list (both vars, docker-file and jsonnet processing)."
        ),
        "oneOf": [
            {
                "title": "As string",
                "description": (
                    "Just pass the tag you want to apply as string."
                    " This form does not allow jsonnet ovar override"
                ),
                "type": "string",
                "default": "",
                "examples": [
                    {
                        "tags": [
                            "my_tagg",
                            "~my_prefix_tag",
                            "my_collection:my_prefix_tag",
                        ],
                    },
                ],
                "oneOf": [
                    {
                        "title": stack["title"],
                        "description": stack["description"],
                        "pattern": stack_pattern,
                    }
                    for stack_pattern, stack in stack_ref_defs.items()
                ],
            },
            {
                "title": "As object",
                "description": (
                    "Define a tag. The key represent the name of the"
                    " tag, while it's value is passed as vars during"
                    " jsonnet processing. This form allow jsonnet ovar override"
                ),
                "type": "object",
                "default": {},
                "examples": [
                    {
                        "tags": [
                            {
                                "other_tag": {
                                    "specific_conf": "val1",
                                }
                            },
                            {"my_collection:another_tag": None},
                            {
                                "~ignore_this_tag": {
                                    "specific_conf": "val1",
                                }
                            },
                        ],
                    },
                ],
                "minProperties": 1,
                "maxProperties": 1,
                # "additionalProperties": False,
                "patternProperties": stack_ref_defs,
            },
        ],
    }

    conf_ident = "{self.name}={self.vars}"

    conf_children = [
        {
            "key": "name",
            "cls": str,
        },
        {
            "key": "vars",
        },
    ]

    # Place to store list of candidates
    jsonnet_candidates = None
    docker_candidates = None

    # Object shortcuts
    stack = None
    prj = None
    app = None

    def node_hook_transform(self, payload):
        "Normalize a tag payload (string or dict) into {name, vars}"

        # Init parent objects
        self.stack = self.get_parent().get_parent()
        self.prj = self.stack.get_parent().get_parent()
        # NOTE(review): self.app is assigned get_parents(), which by its name
        # returns a list of parents, not an app — confirm intended
        self.app = self.prj.get_parents()

        # Transform input
        result = {
            "name": None,
            "vars": {},
        }
        if isinstance(payload, str):
            # Plain string form: the string is the tag name, no vars
            result["name"] = payload

        elif isinstance(payload, dict):

            keys = list(payload.keys())
            if len(keys) == 1:
                # Single-key dict form: key is the tag name, value its vars
                # TODO: replace this by a common function !
                for key, val in payload.items():
                    result["name"] = key
                    result["vars"] = val
            elif len(keys) == 0:
                raise Exception(f"Missing tag name: {payload}")
            else:
                # Multi-key dict: assume it is already in {name, vars} shape
                result.update(payload)

        else:
            raise Exception(f"Not supported type: {payload}")

        assert result["name"]
        return result

StackTagMgr

Bases: NodeList, PaasifyObj

Manage Stack Tags

Source code in paasify/stack_components.py
class StackTagMgr(NodeList, PaasifyObj):
    "Manage Stack Tags"

    conf_schema = {
        # "$schema": "http://json-schema.org/draft-07/schema#",
        "title": "Paasify Stack Tags configuration",
        "description": "Determine a list of tags to apply",
        "oneOf": [
            {
                "title": "List of tags",
                "description": (
                    "Define a list of tags. You can interact in few ways with"
                    " tags. Tags can support boths syntaxes at the same time."
                ),
                "type": "array",
                "default": [],
                "additionalProperties": StackTag.conf_schema,
                # "items": StackTag.conf_schema,
                "examples": [
                    {
                        "tags": [
                            "my_tagg",
                            "~my_prefix_tag",
                            "my_collection:my_prefix_tag",
                            {
                                "other_tag": {
                                    "specific_conf": "val1",
                                }
                            },
                            {"my_collection:another_tag": None},
                            {
                                "~ignore_this_tag": {
                                    "specific_conf": "val1",
                                }
                            },
                        ],
                    },
                ],
            },
            {
                "title": "Unset",
                "description": "Do not declare any tags",
                "type": "null",
                "default": None,
                "examples": [
                    {
                        "tags": None,
                    },
                ],
            },
        ],
    }

    conf_children = StackTag
    module = "paasify.cli"

    def node_hook_transform(self, payload):
        """Assemble prefix + stack + suffix tag lists into one list.

        Prefix/suffix entries already named in the stack list are dropped
        so that a stack-level declaration always wins over the defaults.
        """

        stack_tags = payload["raw"]
        prefixes = payload["tag_prefix"]
        suffixes = payload["tag_suffix"]

        # Collect tag names declared by the stack itself: plain strings,
        # plus the single key of each dict-form tag (before parsing :/ ).
        known = [tag for tag in stack_tags if isinstance(tag, str)]
        known += [first(tag.keys()) for tag in stack_tags if isinstance(tag, dict)]

        # Keep only prefix/suffix entries the stack does not already declare.
        merged = [item for item in prefixes if item not in known]
        merged += stack_tags
        merged += [item for item in suffixes if item not in known]
        return merged

    def node_hook_final(self):
        "Remove disabled tags"

        # Tags whose name starts with "^" disable both themselves and the
        # tag of the same name without the marker.
        marker = "^"
        disabled = [tag.name for tag in self._nodes if tag.name.startswith(marker)]
        if disabled:
            self.log.debug(f"Disabling tags: {disabled}")
        for name in disabled:
            stripped = name[1:]
            drop = (name, stripped)
            self._nodes = [tag for tag in self._nodes if tag.name not in drop]
node_hook_final()

Remove disabled tags

Source code in paasify/stack_components.py
def node_hook_final(self):
    "Remove disabled tags"

    # A leading "^" marks a tag as disabled; it removes both the marker
    # entry itself and the same-named tag without the marker.
    marker = "^"
    disabled = [tag.name for tag in self._nodes if tag.name.startswith(marker)]
    if disabled:
        self.log.debug(f"Disabling tags: {disabled}")
    for name in disabled:
        keep_out = [name, name[1:]]
        self._nodes = [tag for tag in self._nodes if tag.name not in keep_out]