mirror of https://github.com/strongdm/comply
synced 2024-11-12 19:04:54 +00:00

Initial commit
commit bd7899ee31

2
.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
comply
|
||||
output
|
0
.gitmodules
vendored
Normal file
3
AUTHORS.txt
Normal file
@ -0,0 +1,3 @@
|
||||
# Authors in alphabetical order:
|
||||
|
||||
Justin McCarthy <justin@strongdm.com>
|
262
Gopkg.lock
generated
Normal file
@ -0,0 +1,262 @@
|
||||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/Microsoft/go-winio"
|
||||
packages = ["."]
|
||||
revision = "7da180ee92d8bd8bb8c37fc560e673e6557c392f"
|
||||
version = "v0.4.7"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/chzyer/readline"
|
||||
packages = ["."]
|
||||
revision = "f6d7a1f6fbf35bbf9beb80dc63c56a29dcfb759f"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/davecgh/go-spew"
|
||||
packages = ["spew"]
|
||||
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
|
||||
version = "v1.1.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/docker/distribution"
|
||||
packages = [
|
||||
"digest",
|
||||
"reference"
|
||||
]
|
||||
revision = "48294d928ced5dd9b378f7fd7c6f5da3ff3f2c89"
|
||||
version = "v2.6.2"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/docker/docker"
|
||||
packages = [
|
||||
"api/types",
|
||||
"api/types/blkiodev",
|
||||
"api/types/container",
|
||||
"api/types/events",
|
||||
"api/types/filters",
|
||||
"api/types/mount",
|
||||
"api/types/network",
|
||||
"api/types/reference",
|
||||
"api/types/registry",
|
||||
"api/types/strslice",
|
||||
"api/types/swarm",
|
||||
"api/types/time",
|
||||
"api/types/versions",
|
||||
"api/types/volume",
|
||||
"client",
|
||||
"pkg/tlsconfig"
|
||||
]
|
||||
revision = "092cba3727bb9b4a2f0e922cd6c0f93ea270e363"
|
||||
version = "v1.13.1"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/docker/go-connections"
|
||||
packages = [
|
||||
"nat",
|
||||
"sockets",
|
||||
"tlsconfig"
|
||||
]
|
||||
revision = "3ede32e2033de7505e6500d6c868c2b9ed9f169d"
|
||||
version = "v0.3.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/docker/go-units"
|
||||
packages = ["."]
|
||||
revision = "0dadbb0345b35ec7ef35e228dabb8de89a65bf52"
|
||||
version = "v0.3.2"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/elazarl/go-bindata-assetfs"
|
||||
packages = ["."]
|
||||
revision = "30f82fa23fd844bd5bb1e5f216db87fd77b5eb43"
|
||||
version = "v1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/fatih/color"
|
||||
packages = ["."]
|
||||
revision = "507f6050b8568533fb3f5504de8e5205fa62a114"
|
||||
version = "v1.6.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/fsnotify/fsnotify"
|
||||
packages = ["."]
|
||||
revision = "c2828203cd70a50dcccfb2761f8b1f8ceef9a8e9"
|
||||
version = "v1.4.7"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/gohugoio/hugo"
|
||||
packages = ["watcher"]
|
||||
revision = "f414966b942b5aad75565bee6c644782a07f0658"
|
||||
version = "v0.37.1"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/golang/protobuf"
|
||||
packages = ["proto"]
|
||||
revision = "925541529c1fa6821df4e44ce2723319eb2be768"
|
||||
version = "v1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/google/go-github"
|
||||
packages = ["github"]
|
||||
revision = "e48060a28fac52d0f1cb758bc8b87c07bac4a87d"
|
||||
version = "v15.0.0"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/google/go-querystring"
|
||||
packages = ["query"]
|
||||
revision = "53e6ce116135b80d037921a7fdd5138cf32d7a8a"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/gorilla/websocket"
|
||||
packages = ["."]
|
||||
revision = "ea4d1f681babbce9545c9c5f3d5194a789c89f5b"
|
||||
version = "v1.2.0"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/jcelliott/lumber"
|
||||
packages = ["."]
|
||||
revision = "dd349441af25132d146d7095c6693a15431fc9b1"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/juju/ansiterm"
|
||||
packages = [
|
||||
".",
|
||||
"tabwriter"
|
||||
]
|
||||
revision = "720a0952cc2ac777afc295d9861263e2a4cf96a1"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/lunixbochs/vtclean"
|
||||
packages = ["."]
|
||||
revision = "d14193dfc626125c831501c1c42340b4248e1f5a"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/manifoldco/promptui"
|
||||
packages = [
|
||||
".",
|
||||
"list",
|
||||
"screenbuf"
|
||||
]
|
||||
revision = "c0c0d3afc6a03bcb5c1df10b70b862a650db9f9b"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/mattn/go-colorable"
|
||||
packages = ["."]
|
||||
revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
|
||||
version = "v0.0.9"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/mattn/go-isatty"
|
||||
packages = ["."]
|
||||
revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
|
||||
version = "v0.0.3"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/mattn/go-runewidth"
|
||||
packages = ["."]
|
||||
revision = "9e777a8366cce605130a531d2cd6363d07ad7317"
|
||||
version = "v0.0.2"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/nanobox-io/golang-scribble"
|
||||
packages = ["."]
|
||||
revision = "ced58d671850da57ce8c11315424513b608083d7"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/olekukonko/tablewriter"
|
||||
packages = ["."]
|
||||
revision = "b8a9be070da40449e501c3c4730a889e42d87a9e"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/pkg/errors"
|
||||
packages = ["."]
|
||||
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
|
||||
version = "v0.8.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/robfig/cron"
|
||||
packages = ["."]
|
||||
revision = "b024fc5ea0e34bc3f83d9941c8d60b0622bfaca4"
|
||||
version = "v1"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/skratchdot/open-golang"
|
||||
packages = ["open"]
|
||||
revision = "75fb7ed4208cf72d323d7d02fd1a5964a7a9073c"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/urfave/cli"
|
||||
packages = ["."]
|
||||
revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1"
|
||||
version = "v1.20.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/yosssi/ace"
|
||||
packages = ["."]
|
||||
revision = "ea038f4770b6746c3f8f84f14fa60d9fe1205b56"
|
||||
version = "v0.0.5"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/net"
|
||||
packages = [
|
||||
"context",
|
||||
"context/ctxhttp",
|
||||
"proxy"
|
||||
]
|
||||
revision = "d0aafc73d5cdc42264b0af071c261abac580695e"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/oauth2"
|
||||
packages = [
|
||||
".",
|
||||
"internal"
|
||||
]
|
||||
revision = "7af32f14d0a25aec7873e0683e8e48dcead159a8"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/sys"
|
||||
packages = [
|
||||
"unix",
|
||||
"windows"
|
||||
]
|
||||
revision = "dd2ff4accc098aceecb86b36eaa7829b2a17b1c9"
|
||||
|
||||
[[projects]]
|
||||
name = "google.golang.org/appengine"
|
||||
packages = [
|
||||
"internal",
|
||||
"internal/base",
|
||||
"internal/datastore",
|
||||
"internal/log",
|
||||
"internal/remote_api",
|
||||
"internal/urlfetch",
|
||||
"urlfetch"
|
||||
]
|
||||
revision = "150dc57a1b433e64154302bdc40b6bb8aefa313a"
|
||||
version = "v1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "gopkg.in/yaml.v2"
|
||||
packages = ["."]
|
||||
revision = "7f97868eec74b32b0982dd158a51a446d1da7eb5"
|
||||
version = "v2.1.1"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "4fd2ff9f9869c3f3e30601504f4b00fce69d282ae8df42583a1c60848bfd0766"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
30
Gopkg.toml
Normal file
@ -0,0 +1,30 @@
|
||||
# Gopkg.toml example
|
||||
#
|
||||
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
|
||||
# for detailed Gopkg.toml documentation.
|
||||
#
|
||||
# required = ["github.com/user/thing/cmd/thing"]
|
||||
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project"
|
||||
# version = "1.0.0"
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project2"
|
||||
# branch = "dev"
|
||||
# source = "github.com/myfork/project2"
|
||||
#
|
||||
# [[override]]
|
||||
# name = "github.com/x/y"
|
||||
# version = "2.4.0"
|
||||
#
|
||||
# [prune]
|
||||
# non-go = false
|
||||
# go-tests = true
|
||||
# unused-packages = true
|
||||
|
||||
|
||||
[prune]
|
||||
go-tests = true
|
||||
unused-packages = true
|
202
LICENSE.txt
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
35
Makefile
Normal file
@ -0,0 +1,35 @@
|
||||
.DEFAULT_GOAL := comply
|
||||
GO_SOURCES := $(shell find . -name '*.go')
|
||||
THEME_SOURCES := $(shell find themes)
|
||||
|
||||
assets: $(THEME_SOURCES)
|
||||
go-bindata-assetfs -pkg theme -prefix themes themes/...
|
||||
mv bindata_assetfs.go internal/theme/themes_bindata.go
|
||||
|
||||
comply: assets $(GO_SOURCES)
|
||||
go build github.com/strongdm/comply/cmd/comply
|
||||
|
||||
install: assets $(GO_SOURCES)
|
||||
go install github.com/strongdm/comply/cmd/comply
|
||||
|
||||
export-example:
|
||||
cp example/narratives/* themes/comply-soc2/narratives
|
||||
cp example/procedures/* themes/comply-soc2/procedures
|
||||
cp example/policies/* themes/comply-soc2/policies
|
||||
cp example/standards/* themes/comply-soc2/standards
|
||||
cp example/templates/* themes/comply-soc2/templates
|
||||
|
||||
docker:
|
||||
cd build && docker build -t strongdm/pandoc .
|
||||
docker tag jagregory/pandoc:latest strongdm/pandoc:latest
|
||||
docker push strongdm/pandoc
|
||||
|
||||
cleanse:
|
||||
git checkout --orphan newbranch
|
||||
git add -A
|
||||
git commit -m "Initial commit"
|
||||
git branch -D master
|
||||
git branch -m master
|
||||
git push -f origin master
|
||||
git gc --aggressive --prune=all
|
||||
|
44
README.md
Normal file
@ -0,0 +1,44 @@
|
||||
# Comply
|
||||
|
||||
Comply is a SOC2-focused compliance automation tool. Comply features a markdown-powered **document pipeline** and a git-powered **workflow** that help policies and procedures _feel_ like software development.
|
||||
|
||||
Comply manages the lifecycle of your program throughout the year via your existing **ticketing system**.
|
||||
|
||||
In addition to automation, Comply includes a SOC2-focused module featuring open source policy and procedure **templates** suitable for satisfying a SOC2 audit.
|
||||
|
||||
# Discussion
|
||||
|
||||
Join us in [Comply Users](https://join.slack.com/t/comply-users/shared_invite/enQtMzU3MTk5MDkxNDU4LTMwYzZkMjA4YjQ2YTM5Zjc0NTAyYWY5MDBlOGMwMzRmZTk5YzBlOTRiMTVlNGRlZjY1MTY1NDE0MjY5ZjYwNWU)
|
||||
|
||||
# Screenshots
|
||||
|
||||
## Start a Project
|
||||
![screencast 1](sc-1.gif)
|
||||
|
||||
## Track Policy Coverage
|
||||
![screencast 3](sc-2.gif)
|
||||
|
||||
## Dashboard
|
||||
![screencast 2](sc-3.gif)
|
||||
|
||||
## CLI
|
||||
|
||||
```
|
||||
NAME:
|
||||
comply - policy compliance toolkit
|
||||
|
||||
USAGE:
|
||||
comply [global options] command [command options] [arguments...]
|
||||
|
||||
COMMANDS:
|
||||
build, b generate a static website summarizing the compliance program
|
||||
init initialize a new compliance repository (interactive)
|
||||
scheduler create tickets based on procedure schedule
|
||||
serve live updating version of the build command
|
||||
sync sync external systems to local data cache
|
||||
todo list declared vs satisfied compliance controls
|
||||
help, h Shows a list of commands or help for one command
|
||||
|
||||
GLOBAL OPTIONS:
|
||||
--help, -h show help
|
||||
```
|
3
build/Dockerfile
Normal file
@ -0,0 +1,3 @@
|
||||
FROM scratch
|
||||
|
||||
MAINTAINER strongDM Comply <comply@strongdm.com>
|
7
cmd/comply/comply.go
Normal file
@ -0,0 +1,7 @@
|
||||
package main
|
||||
|
||||
import "github.com/strongdm/comply/internal/cli"
|
||||
|
||||
func main() {
|
||||
cli.Main()
|
||||
}
|
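cmd/comply/comply.go only delegates to cli.Main() in internal/cli. Given the github.com/urfave/cli pin in Gopkg.lock and the help text reproduced in the README, the command wiring plausibly resembles the following sketch; it is an illustration of that library's usage under those assumptions, not comply's actual implementation.

```go
// Hypothetical wiring for illustration; comply's real commands live in internal/cli.
package main

import (
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "comply"
	app.Usage = "policy compliance toolkit"
	app.Commands = []cli.Command{
		{Name: "build", Aliases: []string{"b"}, Usage: "generate a static website summarizing the compliance program"},
		{Name: "todo", Usage: "list declared vs satisfied compliance controls"},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
```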
3
example/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
output
|
||||
.comply
|
||||
comply.yml
|
1
example/README.md
Normal file
@ -0,0 +1 @@
|
||||
# Acme Compliance
|
9
example/comply.yml.example
Normal file
@ -0,0 +1,9 @@
programName: "Acme Compliance"
regimes:
  - "SOC2"
  - "ISO 27001"
tickets:
  github:
    token: XXXX
    username: strongdm
    repo: comply
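A minimal sketch of loading this configuration with the pinned gopkg.in/yaml.v2 dependency is below; the struct fields simply mirror the example file and are assumptions, not comply's actual types.

```go
package main

import (
	"fmt"
	"io/ioutil"

	yaml "gopkg.in/yaml.v2"
)

// Config mirrors comply.yml.example; the field set is an assumption.
type Config struct {
	ProgramName string   `yaml:"programName"`
	Regimes     []string `yaml:"regimes"`
	Tickets     map[string]struct {
		Token    string `yaml:"token"`
		Username string `yaml:"username"`
		Repo     string `yaml:"repo"`
	} `yaml:"tickets"`
}

func main() {
	raw, err := ioutil.ReadFile("comply.yml")
	if err != nil {
		panic(err)
	}
	var cfg Config
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%s targets %v; tickets via %v\n", cfg.ProgramName, cfg.Regimes, cfg.Tickets["github"])
}
```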
0
example/narratives/README.md
Normal file
20
example/narratives/control.md
Normal file
@ -0,0 +1,20 @@
name: Control Environment Narrative
acronym: CEN
satisfies:
  TSC:
    - CC2.1
    - CC2.2
    - CC2.3
    - CC4.1
    - CC4.2
    - CC5.1
    - CC5.2
    - CC5.3
majorRevisions:
  - date: Jun 1 2018
    comment: Initial document
---

# Control Environment Narrative

Here we narrate why our org satisfies the control keys listed in the YML block
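Each of these example documents is a YAML front-matter block terminated by `---`, followed by a Markdown body. A sketch of parsing that layout with the pinned gopkg.in/yaml.v2 dependency follows; the field names are taken from the example above and are illustrative, not comply's internal model.

```go
package main

import (
	"fmt"
	"io/ioutil"
	"strings"

	yaml "gopkg.in/yaml.v2"
)

// Narrative mirrors the front matter of the example documents.
type Narrative struct {
	Name           string              `yaml:"name"`
	Acronym        string              `yaml:"acronym"`
	Satisfies      map[string][]string `yaml:"satisfies"`
	MajorRevisions []struct {
		Date    string `yaml:"date"`
		Comment string `yaml:"comment"`
	} `yaml:"majorRevisions"`
}

func main() {
	raw, err := ioutil.ReadFile("narratives/control.md")
	if err != nil {
		panic(err)
	}
	// Front matter comes first and is terminated by a "---" line; the rest is Markdown.
	parts := strings.SplitN(string(raw), "\n---\n", 2)
	var doc Narrative
	if err := yaml.Unmarshal([]byte(parts[0]), &doc); err != nil {
		panic(err)
	}
	fmt.Println(doc.Acronym, "satisfies", doc.Satisfies["TSC"])
}
```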
20
example/narratives/organizational.md
Normal file
@ -0,0 +1,20 @@
|
||||
name: Organizational Narrative
|
||||
acronym: ON
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC1.2
|
||||
- CC1.3
|
||||
- CC1.4
|
||||
- CC1.5
|
||||
- CC3.1
|
||||
- CC3.2
|
||||
- CC3.3
|
||||
- CC3.4
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Organizational Narrative
|
||||
|
||||
Here we narrate why our org satisfies the control keys listed in the YML block
|
13
example/narratives/products.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Products and Services Narrative
|
||||
acronym: PSN
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.9
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Products Narrative
|
||||
|
||||
Here we describe the key products marketed by our organization
|
16
example/narratives/security.md
Normal file
@ -0,0 +1,16 @@
|
||||
name: Security Architecture Narrative
|
||||
acronym: SEN
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.6
|
||||
- CC6.7
|
||||
- CC7.1
|
||||
- CC7.2
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Security Architecture Narrative
|
||||
|
||||
Here we narrate why our org satisfies the control keys listed in the YML block
|
10
example/narratives/system.md
Normal file
@ -0,0 +1,10 @@
|
||||
name: System Architecture Narrative
|
||||
acronym: SYN
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# System Architecture Narrative
|
||||
|
||||
Here we narrate why our org satisfies the control keys listed in the YML block
|
1
example/policies/README.md
Normal file
@ -0,0 +1 @@
|
||||
# TODO Describe Policies
|
15
example/policies/access.md
Normal file
@ -0,0 +1,15 @@
|
||||
name: Access Onboarding and Termination Policy
|
||||
acronym: AOTP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.1
|
||||
- CC6.2
|
||||
- CC6.3
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
33
example/policies/application.md
Normal file
@ -0,0 +1,33 @@
|
||||
name: Application Security Policy
|
||||
acronym: ASP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.2
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The Application Security Policy governs the use of applications deemed critical to {{.Name}} Information Security.
|
||||
|
||||
# Critical Applications
|
||||
|
||||
The following applications are within the scope of this policy:
|
||||
|
||||
* GitHub
|
||||
* Slack
|
||||
* Google Apps
|
||||
|
||||
Applications supporting production data operations (specifically the AWS Console) are deliberately excluded from this policy.
|
||||
|
||||
# Data Sensitivity
|
||||
|
||||
Any company proprietary data may be stored within these *[Critical Applications]*.
|
||||
|
||||
Customer support activities must be conducted entirely within the *[Critical Applications]*.
|
||||
|
||||
# Other Applications
|
||||
|
||||
Other applications not listed in *[Critical Applications]* may include company proprietary data, but must not contain any customer support or customer-owned data.
|
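The `{{.Name}}` placeholder above is Go template syntax; presumably the organization name from comply.yml is substituted when the policy is rendered. The standard-library sketch below shows only that substitution, with an assumed data shape.

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// One line from the policy body; {{.Name}} is a Go template action.
	body := "...deemed critical to {{.Name}} Information Security.\n"
	// Assumed data shape: the organization name presumably comes from comply.yml.
	data := struct{ Name string }{Name: "Acme"}
	tmpl := template.Must(template.New("policy").Parse(body))
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
```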
14
example/policies/availability.md
Normal file
@ -0,0 +1,14 @@
|
||||
name: Availability Policy
|
||||
acronym: AP
|
||||
satisfies:
|
||||
TSC:
|
||||
- A1.1
|
||||
- CC9.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The Availability Policy governs X.
|
13
example/policies/change.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: System Change Policy
|
||||
acronym: SCP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC8.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
37
example/policies/classification.md
Normal file
@ -0,0 +1,37 @@
|
||||
name: Data Classification Policy
|
||||
acronym: DCP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.9
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Background
|
||||
|
||||
This policy defines the high level objectives and implementation instructions for the organization’s data classification scheme. This includes data classification levels, as well as procedures for the classification, labeling and handling of data within the organization. Confidentiality and non-disclosure agreements maintained by the organization must reference this policy.
|
||||
|
||||
# Purpose and Scope
|
||||
|
||||
- This data classification policy defines the requirements to ensure that information within the organization is protected at an appropriate level.
|
||||
|
||||
- This document applies to the entire scope of the organization’s information security program. It includes all types of information, regardless of its form, such as paper or electronic documents, applications and databases, and knowledge or information that is not written.
|
||||
|
||||
- This policy applies to all individuals and systems that have access to information kept by the organization.
|
||||
|
||||
# References
|
||||
|
||||
- Risk Assessment Policy
|
||||
- Security Incident Management Policy
|
||||
|
||||
# Policy
|
||||
|
||||
- If classified information is received from outside the organization, the person who receives the information must classify it in accordance with the rules prescribed in this policy. The person thereby will become the owner of the information.
|
||||
- If classified information is received from outside the organization and handled as part of business operations activities (e.g., customer data on provided cloud services), the information classification, as well as the owner of such information, must be made in accordance with the specifications of the respective customer service agreement and other legal requirements.
|
||||
- When classifying information, the level of confidentiality is determined by:
|
||||
- The value of the information, based on impacts identified during the risk assessment process. More information on risk assessments is defined in the Risk Assessment Policy (reference (a)).
|
||||
- Sensitivity and criticality of the information, based on the highest risk calculated for each information item during the risk assessment.
|
||||
- Legal, regulatory and contractual obligations.
|
||||
|
||||
- Information must be classified based on confidentiality levels as defined in Table 1.
|
13
example/policies/conduct.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Code of Conduct Policy
|
||||
acronym: COCP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC1.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The Code of Conduct Policy governs X.
|
14
example/policies/confidentiality.md
Normal file
@ -0,0 +1,14 @@
|
||||
name: Confidentiality Policy
|
||||
acronym: CP
|
||||
satisfies:
|
||||
TSC:
|
||||
- C1.1
|
||||
- C1.2
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/continuity.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Business Continuity Policy
|
||||
acronym: BCP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/cyber.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Cyber Risk Assessment Policy
|
||||
acronym: CRP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/datacenter.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Datacenter Policy
|
||||
acronym: DP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.4
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/development.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Software Development Lifecycle Policy
|
||||
acronym: SDLCP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC8.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
15
example/policies/disaster.md
Normal file
@ -0,0 +1,15 @@
|
||||
name: Disaster Recovery Policy
|
||||
acronym: DRP
|
||||
satisfies:
|
||||
TSC:
|
||||
- A1.2
|
||||
- A1.3
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
||||
|
13
example/policies/encryption.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Encryption Policy
|
||||
acronym: EP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.9
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
15
example/policies/incident.md
Normal file
@ -0,0 +1,15 @@
|
||||
name: Security Incident Response Policy
|
||||
acronym: SIRP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC7.3
|
||||
- CC7.4
|
||||
- CC7.5
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
29
example/policies/information.md
Normal file
@ -0,0 +1,29 @@
|
||||
name: Information Security Policy
|
||||
acronym: ISP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.9
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The Information Security Policy is a composite policy referencing other Acme policies relevant to information security.
|
||||
|
||||
# Component Policies
|
||||
|
||||
The Acme Information Security Policy is composed of:
|
||||
|
||||
- [Application Security Policy (*Acme-ASP.pdf*)](Acme-ASP.pdf) {-}
- [Cyber Risk Management Policy (*Acme-CRP.pdf*)](Acme-CRP.pdf) {-}
- [Data Classification Policy (*Acme-DCP.pdf*)](Acme-DCP.pdf) {-}
- [Data Retention Policy (*Acme-DRP.pdf*)](Acme-DRP.pdf) {-}
- [Datacenter Security Policy (*Acme-DSP.pdf*)](Acme-DSP.pdf) {-}
- [Encryption Policy (*Acme-EP.pdf*)](Acme-EP.pdf) {-}
- [Password Policy (*Acme-PWP.pdf*)](Acme-PWP.pdf) {-}
- [Remote Access Policy (*Acme-REAP.pdf*)](Acme-REAP.pdf) {-}
- [Removable Media Policy (*Acme-RMP.pdf*)](Acme-RMP.pdf) {-}
- [Security Incident Response Policy (*Acme-SIRP.pdf*)](Acme-SIRP.pdf) {-}
- [Workstation Security Policy (*Acme-WSP.pdf*)](Acme-WSP.pdf) {-}
|
13
example/policies/log.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Log Management Policy
|
||||
acronym: LMP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC7.2
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/media.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Removable Media and Cloud Storage Policy
|
||||
acronym: MCP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.7
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/office.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Office Security Policy
|
||||
acronym: OSP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.4
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/password.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Password Policy
|
||||
acronym: PWP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.9
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
21
example/policies/policy.md
Normal file
@ -0,0 +1,21 @@
|
||||
name: Policy Training Policy
|
||||
acronym: PTP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.9
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The Policy Training Policy addresses policy education requirements for Acme employees and contractors.
|
||||
|
||||
# Adherence
|
||||
|
||||
Assignees are reminded that adherence to assigned policies is binding under the terms of their Acme Employment Offer Letter and/or their Acme Independent Contractor Agreement.
|
||||
|
||||
# Applicability
|
||||
|
||||
Upon each full-time, part-time or contractor addition, the hiring manager determines which subset of Acme Policies applies to that individual. The individual is tasked with reading the assigned policies within 5 working days. The initial assignment date, scope, and completion date are entered into the [Ledger].
|
30
example/policies/privacy.md
Normal file
@ -0,0 +1,30 @@
|
||||
name: Privacy Management Policy
|
||||
acronym: PMP
|
||||
satisfies:
|
||||
TSC:
|
||||
- P1.1
|
||||
- P2.1
|
||||
- P3.1
|
||||
- P3.2
|
||||
- P4.1
|
||||
- P4.2
|
||||
- P4.3
|
||||
- P5.1
|
||||
- P5.2
|
||||
- P6.1
|
||||
- P6.2
|
||||
- P6.3
|
||||
- P6.4
|
||||
- P6.5
|
||||
- P6.6
|
||||
- P6.7
|
||||
- P7.1
|
||||
- P8.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
17
example/policies/processing.md
Normal file
@ -0,0 +1,17 @@
|
||||
name: Processing Integrity Management Policy
|
||||
acronym: PIMP
|
||||
satisfies:
|
||||
TSC:
|
||||
- PI1.1
|
||||
- PI1.2
|
||||
- PI1.3
|
||||
- PI1.4
|
||||
- PI1.5
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
15
example/policies/remote.md
Normal file
@ -0,0 +1,15 @@
|
||||
name: Remote Access Policy
|
||||
acronym: REAP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.1
|
||||
- CC6.2
|
||||
- CC6.7
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
15
example/policies/retention.md
Normal file
@ -0,0 +1,15 @@
|
||||
name: Data Retention Policy
|
||||
acronym: RP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC1.2
|
||||
- CC6.5
|
||||
- P4.2
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/risk.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Risk Assessment Policy
|
||||
acronym: RIAP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.1
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/vendor.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Vendor Management Policy
|
||||
acronym: VMP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC9.2
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
13
example/policies/workstation.md
Normal file
@ -0,0 +1,13 @@
|
||||
name: Workstation Policy
|
||||
acronym: WP
|
||||
satisfies:
|
||||
TSC:
|
||||
- CC6.8
|
||||
majorRevisions:
|
||||
- date: Jun 1 2018
|
||||
comment: Initial document
|
||||
---
|
||||
|
||||
# Overview
|
||||
|
||||
The XXX Policy governs X.
|
1
example/procedures/README.md
Normal file
@ -0,0 +1 @@
|
||||
# TODO Describe Procedures
|
13
example/procedures/offboarding.md
Normal file
@ -0,0 +1,13 @@
|
||||
id: "offboard"
|
||||
name: "Offboard User"
|
||||
---
|
||||
|
||||
# Offboarding Steps
|
||||
|
||||
- [ ] Determine github username and assign to correct Org
|
||||
- [ ] Create Slack account
|
||||
- [ ] Determine and assign IAM role
|
||||
|
||||
# Attach Evidence
|
||||
|
||||
No evidence beyond activity logs within Slack, Github
|
13
example/procedures/onboarding.md
Normal file
@ -0,0 +1,13 @@
|
||||
id: "onboard"
|
||||
name: "Onboard New User"
|
||||
---
|
||||
|
||||
# Onboarding Steps
|
||||
|
||||
- [ ] Determine github username and assign to correct Org
|
||||
- [ ] Create Slack account
|
||||
- [ ] Determine and assign IAM role
|
||||
|
||||
# Attach Evidence
|
||||
|
||||
No evidence beyond activity logs within Slack, Github
|
11
example/procedures/patch.md
Normal file
@ -0,0 +1,11 @@
|
||||
id: "patch"
|
||||
name: "Apply OS patches"
|
||||
cron: "0 0 1 * * *"
|
||||
---
|
||||
|
||||
# Production Environment
|
||||
|
||||
- [ ] View patchlevel report in OpenVAS
|
||||
- [ ] Apply patches using Ansible playbooks
|
||||
- [ ] AWS us-west-2
|
||||
- [ ] Reston Datacenter
|
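The `cron` field is a six-field expression with a leading seconds column. The pinned github.com/robfig/cron dependency parses exactly this shape; the sketch below evaluates the schedule purely to illustrate the format, not comply's scheduler command.

```go
package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron"
)

func main() {
	// robfig/cron v1 (pinned in Gopkg.lock) accepts a leading seconds field,
	// matching the "0 0 1 * * *" value above: 01:00:00 every day.
	sched, err := cron.Parse("0 0 1 * * *")
	if err != nil {
		panic(err)
	}
	fmt.Println("next run:", sched.Next(time.Now()))
}
```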
13
example/procedures/workstation.md
Normal file
@ -0,0 +1,13 @@
|
||||
id: "workstation"
|
||||
name: "Collect Workstation Details"
|
||||
cron: "0 0 * * * *"
|
||||
---
|
||||
|
||||
# Workstation Details
|
||||
|
||||
- [ ] E-mail all users requesting confirmation of drive encryption
|
||||
- [ ] E-mail all users requesting confirmation of antivirus / antimalware configuration
|
||||
|
||||
# Insert Evidence
|
||||
|
||||
Insert evidence into the Evidence Vault
|
5
example/standards/README.md
Normal file
@ -0,0 +1,5 @@
|
||||
# Compliance Standards
|
||||
|
||||
All `yaml` files in this directory are assumed to conform to https://github.com/opencontrol/schemas/tree/master/kwalify/standard
|
||||
|
||||
Adjust the target standard for this project by adding or removing line-items within each file, or adding/removing a standard file entirely.
|
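Each standard file pairs a top-level `name` with one entry per control ID, and each entry carries `family`, `name`, and `description` (see TSC-2017.yml below). The sketch that follows reads that shape with the pinned gopkg.in/yaml.v2 dependency; the types are illustrative, not comply's loader.

```go
package main

import (
	"fmt"
	"io/ioutil"

	yaml "gopkg.in/yaml.v2"
)

// Control mirrors one entry of TSC-2017.yml.
type Control struct {
	Family      string `yaml:"family"`
	Name        string `yaml:"name"`
	Description string `yaml:"description"`
}

func main() {
	raw, err := ioutil.ReadFile("standards/TSC-2017.yml")
	if err != nil {
		panic(err)
	}
	// The top level mixes a "name" key with control IDs (CC1.1, A1.1, ...),
	// so decode generically first, then re-decode each control entry.
	var doc map[string]interface{}
	if err := yaml.Unmarshal(raw, &doc); err != nil {
		panic(err)
	}
	fmt.Println("standard:", doc["name"])
	for id, v := range doc {
		if id == "name" {
			continue
		}
		b, _ := yaml.Marshal(v) // round-trip the entry into the typed struct
		var c Control
		if err := yaml.Unmarshal(b, &c); err == nil {
			fmt.Printf("%s (%s): %s\n", id, c.Family, c.Name)
		}
	}
}
```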
245
example/standards/TSC-2017.yml
Normal file
@ -0,0 +1,245 @@
|
||||
name: TSC
|
||||
CC1.1:
|
||||
family: CC1
|
||||
name: Integrity and Ethics
|
||||
description: The entity demonstrates a commitment to integrity and ethical values
|
||||
CC1.2:
|
||||
family: CC1
|
||||
name: Board Independence
|
||||
description: The board of directors demonstrates independence from management and exercises oversight of the development and performance of internal control
|
||||
CC1.3:
|
||||
family: CC1
|
||||
name: Organizational Structure
|
||||
description: Management establishes, with board oversight, structures, reporting lines, and appropriate authorities and responsibilities in the pursuit of objectives
|
||||
CC1.4:
|
||||
family: CC1
|
||||
name: Hiring, Training and Retention
|
||||
description: The entity demonstrates a commitment to attract, develop, and retain competent individuals in alignment with objectives
|
||||
CC1.5:
|
||||
family: CC1
|
||||
name: Individual Accountability
|
||||
description: The entity holds individuals accountable for their internal control responsibilities in the pursuit of objectives.
|
||||
CC2.1:
|
||||
family: CC2
|
||||
name: Use of Information Systems
|
||||
description: The entity obtains or generates and uses relevant, quality information to support the functioning of internal control
|
||||
CC2.2:
|
||||
family: CC2
|
||||
name: Use of Communication Systems, Internal
|
||||
description: The entity internally communicates information, including objectives and responsibilities for internal control, necessary to support the functioning of internal control
|
||||
CC2.3:
|
||||
family: CC2
|
||||
name: Use of Communication Systems, External
|
||||
description: The entity communicates with external parties regarding matters affecting the functioning of internal control
|
||||
CC3.1:
|
||||
family: CC3
|
||||
name: Objectives
|
||||
description: The entity specifies objectives with sufficient clarity to enable the identification and assessment of risks relating to objectives
|
||||
CC3.2:
|
||||
family: CC3
|
||||
name: Risk to Objectives
|
||||
description: The entity identifies risks to the achievement of its objectives across the entity and analyzes risks as a basis for determining how the risks should be managed
|
||||
CC3.3:
|
||||
family: CC3
|
||||
name: Fraud Risk to Objectives
|
||||
description: The entity considers the potential for fraud in assessing risks to the achievement of objectives
|
||||
CC3.4:
|
||||
family: CC3
|
||||
name: Impact of Changes
|
||||
description: The entity identifies and assesses changes that could significantly impact the system of internal control
|
||||
CC4.1:
|
||||
family: CC4
|
||||
name: Monitoring
|
||||
description: The entity selects, develops, and performs ongoing and/or separate evaluations to ascertain whether the components of internal control are present and functioning
|
||||
CC4.2:
|
||||
family: CC4
|
||||
name: Remediation
|
||||
description: The entity evaluates and communicates internal control deficiencies in a timely manner to those parties responsible for taking corrective action, including senior management and the board of directors, as appropriate
|
||||
CC5.1:
|
||||
family: CC5
|
||||
name: Objective Risk Mitigation
|
||||
description: The entity selects and develops control activities that contribute to the mitigation of risks to the achievement of objectives to acceptable levels
|
||||
CC5.2:
|
||||
family: CC5
|
||||
name: Technology Controls
|
||||
description: The entity also selects and develops general control activities over technology to support the achievement of objectives
|
||||
CC5.3:
|
||||
family: CC5
|
||||
name: Established Policies
|
||||
description: The entity deploys control activities through policies that establish what is expected and in procedures that put policies into action
|
||||
CC6.1:
|
||||
family: CC6
|
||||
name: Logical Access
|
||||
description: The entity implements logical access security software, infrastructure, and architectures over protected information assets to protect them from security events to meet the entity’s objectives
|
||||
CC6.2:
|
||||
family: CC6
|
||||
name: User Access
|
||||
description: Prior to issuing system credentials and granting system access, the entity registers and authorizes new internal and external users whose access is administered by the entity. For those users whose access is administered by the entity, user system credentials are removed when user access is no longer authorized
|
||||
CC6.3:
|
||||
family: CC6
|
||||
name: Role-Based Access
|
||||
description: The entity authorizes, modifies, or removes access to data, software, functions, and other protected information assets based on roles, responsibilities, or the system design and changes, giving consideration to the concepts of least privilege and segregation of duties, to meet the entity’s objectives
|
||||
CC6.4:
|
||||
family: CC6
|
||||
name: Physical Access
|
||||
description: The entity restricts physical access to facilities and protected information assets (for example, data center facilities, back-up media storage, and other sensitive locations) to authorized personnel to meet the entity’s objectives
|
||||
CC6.5:
|
||||
family: CC6
|
||||
name: Data Disposal
|
||||
description: The entity discontinues logical and physical protections over physical assets only after the ability to read or recover data and software from those assets has been diminished and is no longer required to meet the entity’s objectives
|
||||
CC6.6:
|
||||
family: CC6
|
||||
name: External Threats
|
||||
description: The entity implements logical access security measures to protect against threats from sources outside its system boundaries
|
||||
CC6.7:
|
||||
family: CC6
|
||||
name: Data Custody and Transmission
|
||||
description: The entity restricts the transmission, movement, and removal of information to authorized internal and external users and processes, and protects it during transmission, movement, or removal to meet the entity’s objectives
|
||||
CC6.8:
|
||||
family: CC6
|
||||
name: Malware Detection
|
||||
description: The entity implements controls to prevent or detect and act upon the introduction of unauthorized or malicious software to meet the entity’s objectives
|
||||
CC7.1:
|
||||
family: CC7
|
||||
name: Vulnerability Detection
|
||||
description: To meet its objectives, the entity uses detection and monitoring procedures to identify (1) changes to configurations that result in the introduction of new vulnerabilities, and (2) susceptibilities to newly discovered vulnerabilities
|
||||
CC7.2:
|
||||
family: CC7
|
||||
name: Anomaly Detection
|
||||
description: The entity monitors system components and the operation of those components for anomalies that are indicative of malicious acts, natural disasters, and errors affecting the entity’s ability to meet its objectives; anomalies are analyzed to determine whether they represent security events
|
||||
CC7.3:
|
||||
family: CC7
|
||||
name: Security Incident Evaluation
|
||||
description: The entity evaluates security events to determine whether they could or have resulted in a failure of the entity to meet its objectives (security incidents) and, if so, takes actions to prevent or address such failures
|
||||
CC7.4:
|
||||
family: CC7
|
||||
name: Security Incident Response Plan
|
||||
description: The entity responds to identified security incidents by executing a defined incident response program to understand, contain, remediate, and communicate security incidents, as appropriate
|
||||
CC7.5:
|
||||
family: CC7
|
||||
name: Security Incident Response Execution
|
||||
description: The entity identifies, develops, and implements activities to recover from identified security incidents
|
||||
CC8.1:
|
||||
family: CC8
|
||||
name: Change Control
|
||||
description: The entity authorizes, designs, develops or acquires, configures, documents, tests, approves, and implements changes to infrastructure, data, software, and procedures to meet its objectives
|
||||
CC9.1:
|
||||
family: CC9
|
||||
name: Disruption Risk Mitigation
|
||||
description: The entity identifies, selects, and develops risk mitigation activities for risks arising from potential business disruptions
|
||||
CC9.2:
|
||||
family: CC9
|
||||
name: Vendor Risk Management
|
||||
description: The entity assesses and manages risks associated with vendors and business partners
|
||||
A1.1:
|
||||
family: A1
|
||||
name: Capacity Planning
|
||||
description: The entity maintains, monitors, and evaluates current processing capacity and use of system components (infrastructure, data, and software) to manage capacity demand and to enable the implementation of additional capacity to help meet its objectives
|
||||
A1.2:
|
||||
family: A1
|
||||
name: Backup and Recovery
|
||||
description: The entity authorizes, designs, develops or acquires, implements, operates, approves, maintains, and monitors environmental protections, software, data back-up processes, and recovery infrastructure to meet its objectives
|
||||
A1.3:
|
||||
family: A1
|
||||
name: Recovery Testing
|
||||
description: The entity tests recovery plan procedures supporting system recovery to meet its objectives
|
||||
C1.1:
|
||||
family: C1
|
||||
name: Confidential Information Identification
|
||||
description: The entity identifies and maintains confidential information to meet the entity’s objectives related to confidentiality
|
||||
C1.2:
|
||||
family: C1
|
||||
name: Confidential Information Disposal
|
||||
description: The entity disposes of confidential information to meet the entity’s objectives related to confidentiality.
|
||||
PI1.1:
|
||||
family: PI1
|
||||
name: Processing Integrity Monitoring
|
||||
description: The entity obtains or generates, uses, and communicates relevant, quality information regarding the objectives related to processing, including definitions of data processed and product and service specifications, to support the use of products and services
|
||||
PI1.2:
|
||||
family: PI1
|
||||
name: Processing Integrity Accuracy
|
||||
description: The entity implements policies and procedures over system inputs, including controls over completeness and accuracy, to result in products, services, and reporting to meet the entity’s objectives
|
||||
PI1.3:
|
||||
family: PI1
|
||||
name: Processing Integrity Operations
|
||||
description: The entity implements policies and procedures over system processing to result in products, services, and reporting to meet the entity’s objectives
|
||||
PI1.4:
|
||||
family: PI1
|
||||
name: Processing Integrity Outputs
|
||||
description: The entity implements policies and procedures to make available or deliver output completely, accurately, and timely in accordance with specifications to meet the entity’s objectives
|
||||
PI1.5:
|
||||
family: PI1
|
||||
name: Processing Integrity Backups
|
||||
description: The entity implements policies and procedures to store inputs, items in processing, and outputs completely, accurately, and timely in accordance with system specifications to meet the entity’s objectives
|
||||
P1.1:
|
||||
family: P1
|
||||
name: Privacy Notification
|
||||
description: The entity provides notice to data subjects about its privacy practices to meet the entity’s objectives related to privacy. The notice is updated and communicated to data subjects in a timely manner for changes to the entity’s privacy practices, including changes in the use of personal information, to meet the entity’s objectives related to privacy
|
||||
P2.1:
|
||||
family: P2
|
||||
name: Privacy Consent and Choice
|
||||
description: The entity communicates choices available regarding the collection, use, retention, disclosure, and disposal of personal information to the data subjects and the consequences, if any, of each choice. Explicit consent for the collection, use, retention, disclosure, and disposal of personal information is obtained from data subjects or other authorized persons, if required. Such consent is obtained only for the intended purpose of the information to meet the entity’s objectives related to privacy. The entity’s basis for determining implicit consent for the collection, use, retention, disclosure, and disposal of personal information is documented
|
||||
P3.1:
|
||||
family: P3
|
||||
name: Personal Information Collection
|
||||
description: Personal information is collected consistent with the entity’s objectives related to privacy
|
||||
P3.2:
|
||||
family: P3
|
||||
name: Explicit Consent
|
||||
description: For information requiring explicit consent, the entity communicates the need for such consent, as well as the consequences of a failure to provide consent for the request for personal information, and obtains the consent prior to the collection of the information to meet the entity’s objectives related to privacy
|
||||
P4.1:
|
||||
family: P4
|
||||
name: Proper Use of Personal Information
|
||||
description: The entity limits the use of personal information to the purposes identified in the entity’s objectives related to privacy
|
||||
P4.2:
|
||||
family: P4
|
||||
name: Personal Information Retention
|
||||
description: The entity retains personal information consistent with the entity’s objectives related to privacy
|
||||
P4.3:
|
||||
family: P4
|
||||
name: Personal Information Disposal
|
||||
description: The entity securely disposes of personal information to meet the entity’s objectives related to privacy
|
||||
P5.1:
|
||||
family: P5
|
||||
name: Data Subject Access
|
||||
description: The entity grants identified and authenticated data subjects the ability to access their stored personal information for review and, upon request, provides physical or electronic copies of that information to data subjects to meet the entity’s objectives related to privacy. If access is denied, data subjects are informed of the denial and reason for such denial, as required, to meet the entity’s objectives related to privacy
|
||||
P5.2:
|
||||
family: P5
|
||||
name: Data Subject Amendment
|
||||
description: The entity corrects, amends, or appends personal information based on information provided by data subjects and communicates such information to third parties, as committed or required, to meet the entity’s objectives related to privacy. If a request for correction is denied, data subjects are informed of the denial and reason for such denial to meet the entity’s objectives related to privacy
|
||||
P6.1:
|
||||
family: P6
|
||||
name: Consent for Third Party Disclosure
|
||||
description: The entity discloses personal information to third parties with the explicit consent of data subjects, and such consent is obtained prior to disclosure to meet the entity’s objectives related to privacy
|
||||
P6.2:
|
||||
family: P6
|
||||
name: Authorized Disclosures
|
||||
description: The entity creates and retains a complete, accurate, and timely record of authorized disclosures of personal information to meet the entity’s objectives related to privacy
|
||||
P6.3:
|
||||
family: P6
|
||||
name: Unauthorized Disclosures
|
||||
description: The entity creates and retains a complete, accurate, and timely record of detected or reported unauthorized disclosures (including breaches) of personal information to meet the entity’s objectives related to privacy
|
||||
P6.4:
|
||||
family: P6
|
||||
name: Appropriate Third Party Disclosure
|
||||
description: The entity obtains privacy commitments from vendors and other third parties who have access to personal information to meet the entity’s objectives related to privacy. The entity assesses those parties’ compliance on a periodic and as-needed basis and takes corrective action, if necessary
|
||||
P6.5:
|
||||
family: P6
|
||||
name: Unauthorized Third Party Disclosure
|
||||
description: The entity obtains commitments from vendors and other third parties with access to personal information to notify the entity in the event of actual or suspected unauthorized disclosures of personal information. Such notifications are reported to appropriate personnel and acted on in accordance with established incident response procedures to meet the entity’s objectives related to privacy
|
||||
P6.6:
|
||||
family: P6
|
||||
name: Notification of Unauthorized Third Party Disclosure
|
||||
description: The entity provides notification of breaches and incidents to affected data subjects, regulators, and others to meet the entity’s objectives related to privacy
|
||||
P6.7:
|
||||
family: P6
|
||||
name: Accounting of Personal Information
|
||||
description: The entity provides data subjects with an accounting of the personal information held and disclosure of the data subjects’ personal information, upon the data subjects’ request, to meet the entity’s objectives related to privacy
|
||||
P7.1:
|
||||
family: P7
|
||||
name: Accuracy of Personal Information
|
||||
description: The entity collects and maintains accurate, up-to-date, complete, and relevant personal information to meet the entity’s objectives related to privacy
|
||||
P8.1:
|
||||
family: P8
|
||||
name: Personal Information Dispute Resolution
|
||||
description: The entity implements a process for receiving, addressing, resolving, and communicating the resolution of inquiries, complaints, and disputes from data subjects and others and periodically monitors compliance to meet the entity’s objectives related to privacy. Corrections and other necessary actions related to identified deficiencies are made or taken in a timely manner
|
280
example/templates/default.latex
Normal file
@ -0,0 +1,280 @@
|
||||
\documentclass[$if(fontsize)$$fontsize$,$endif$$if(lang)$$babel-lang$,$endif$$if(papersize)$$papersize$paper,$endif$$for(classoption)$$classoption$$sep$,$endfor$]{$documentclass$}
|
||||
$if(beamerarticle)$
|
||||
\usepackage{beamerarticle} % needs to be loaded first
|
||||
$endif$
|
||||
$if(fontfamily)$
|
||||
\usepackage[$for(fontfamilyoptions)$$fontfamilyoptions$$sep$,$endfor$]{$fontfamily$}
|
||||
$else$
|
||||
\usepackage{lmodern}
|
||||
$endif$
|
||||
$if(linestretch)$
|
||||
\usepackage{setspace}
|
||||
\setstretch{$linestretch$}
|
||||
$endif$
|
||||
\usepackage{amssymb,amsmath}
|
||||
\usepackage{ifxetex,ifluatex}
|
||||
\usepackage{fixltx2e} % provides \textsubscript
|
||||
\ifnum 0\ifxetex 1\fi\ifluatex 1\fi=0 % if pdftex
|
||||
\usepackage[$if(fontenc)$$fontenc$$else$T1$endif$]{fontenc}
|
||||
\usepackage[utf8]{inputenc}
|
||||
$if(euro)$
|
||||
\usepackage{eurosym}
|
||||
$endif$
|
||||
\else % if luatex or xelatex
|
||||
\ifxetex
|
||||
\usepackage{mathspec}
|
||||
\else
|
||||
\usepackage{fontspec}
|
||||
\fi
|
||||
\defaultfontfeatures{Ligatures=TeX,Scale=MatchLowercase}
|
||||
$for(fontfamilies)$
|
||||
\newfontfamily{$fontfamilies.name$}[$fontfamilies.options$]{$fontfamilies.font$}
|
||||
$endfor$
|
||||
$if(euro)$
|
||||
\newcommand{\euro}{€}
|
||||
$endif$
|
||||
$if(mainfont)$
|
||||
\setmainfont[$for(mainfontoptions)$$mainfontoptions$$sep$,$endfor$]{$mainfont$}
|
||||
$endif$
|
||||
$if(sansfont)$
|
||||
\setsansfont[$for(sansfontoptions)$$sansfontoptions$$sep$,$endfor$]{$sansfont$}
|
||||
$endif$
|
||||
$if(monofont)$
|
||||
\setmonofont[Mapping=tex-ansi$if(monofontoptions)$,$for(monofontoptions)$$monofontoptions$$sep$,$endfor$$endif$]{$monofont$}
|
||||
$endif$
|
||||
$if(mathfont)$
|
||||
\setmathfont(Digits,Latin,Greek)[$for(mathfontoptions)$$mathfontoptions$$sep$,$endfor$]{$mathfont$}
|
||||
$endif$
|
||||
$if(CJKmainfont)$
|
||||
\usepackage{xeCJK}
|
||||
\setCJKmainfont[$for(CJKoptions)$$CJKoptions$$sep$,$endfor$]{$CJKmainfont$}
|
||||
$endif$
|
||||
\fi
|
||||
% use upquote if available, for straight quotes in verbatim environments
|
||||
\IfFileExists{upquote.sty}{\usepackage{upquote}}{}
|
||||
% use microtype if available
|
||||
\IfFileExists{microtype.sty}{%
|
||||
\usepackage[$for(microtypeoptions)$$microtypeoptions$$sep$,$endfor$]{microtype}
|
||||
\UseMicrotypeSet[protrusion]{basicmath} % disable protrusion for tt fonts
|
||||
}{}
|
||||
\PassOptionsToPackage{hyphens}{url} % url is loaded by hyperref
|
||||
$if(verbatim-in-note)$
|
||||
\usepackage{fancyvrb}
|
||||
$endif$
|
||||
\usepackage[unicode=true]{hyperref}
|
||||
$if(colorlinks)$
|
||||
\PassOptionsToPackage{usenames,dvipsnames}{color} % color is loaded by hyperref
|
||||
$endif$
|
||||
\hypersetup{
|
||||
$if(title-meta)$
|
||||
pdftitle={$title-meta$},
|
||||
$endif$
|
||||
$if(author-meta)$
|
||||
pdfauthor={$author-meta$},
|
||||
$endif$
|
||||
$if(keywords)$
|
||||
pdfkeywords={$for(keywords)$$keywords$$sep$, $endfor$},
|
||||
$endif$
|
||||
$if(colorlinks)$
|
||||
colorlinks=true,
|
||||
linkcolor=$if(linkcolor)$$linkcolor$$else$Maroon$endif$,
|
||||
citecolor=$if(citecolor)$$citecolor$$else$Blue$endif$,
|
||||
urlcolor=$if(urlcolor)$$urlcolor$$else$Blue$endif$,
|
||||
$else$
|
||||
pdfborder={0 0 0},
|
||||
$endif$
|
||||
breaklinks=true}
|
||||
\urlstyle{same} % don't use monospace font for urls
|
||||
$if(verbatim-in-note)$
|
||||
\VerbatimFootnotes % allows verbatim text in footnotes
|
||||
$endif$
|
||||
$if(geometry)$
|
||||
\usepackage[$for(geometry)$$geometry$$sep$,$endfor$]{geometry}
|
||||
$endif$
|
||||
$if(lang)$
|
||||
\ifnum 0\ifxetex 1\fi\ifluatex 1\fi=0 % if pdftex
|
||||
\usepackage[shorthands=off,$for(babel-otherlangs)$$babel-otherlangs$,$endfor$main=$babel-lang$]{babel}
|
||||
$if(babel-newcommands)$
|
||||
$babel-newcommands$
|
||||
$endif$
|
||||
\else
|
||||
\usepackage{polyglossia}
|
||||
\setmainlanguage[$polyglossia-lang.options$]{$polyglossia-lang.name$}
|
||||
$for(polyglossia-otherlangs)$
|
||||
\setotherlanguage[$polyglossia-otherlangs.options$]{$polyglossia-otherlangs.name$}
|
||||
$endfor$
|
||||
\fi
|
||||
$endif$
|
||||
$if(natbib)$
|
||||
\usepackage{natbib}
|
||||
\bibliographystyle{$if(biblio-style)$$biblio-style$$else$plainnat$endif$}
|
||||
$endif$
|
||||
$if(biblatex)$
|
||||
\usepackage[$if(biblio-style)$style=$biblio-style$,$endif$$for(biblatexoptions)$$biblatexoptions$$sep$,$endfor$]{biblatex}
|
||||
$for(bibliography)$
|
||||
\addbibresource{$bibliography$}
|
||||
$endfor$
|
||||
$endif$
|
||||
$if(listings)$
|
||||
\usepackage{listings}
|
||||
$endif$
|
||||
$if(lhs)$
|
||||
\lstnewenvironment{code}{\lstset{language=Haskell,basicstyle=\small\ttfamily}}{}
|
||||
$endif$
|
||||
$if(highlighting-macros)$
|
||||
$highlighting-macros$
|
||||
$endif$
|
||||
$if(tables)$
|
||||
\usepackage{longtable,booktabs}
|
||||
% Fix footnotes in tables (requires footnote package)
|
||||
\IfFileExists{footnote.sty}{\usepackage{footnote}\makesavenoteenv{longtable}}{}
|
||||
$endif$
|
||||
$if(graphics)$
|
||||
\usepackage{graphicx,grffile}
|
||||
\makeatletter
|
||||
\def\maxwidth{\ifdim\Gin@nat@width>\linewidth\linewidth\else\Gin@nat@width\fi}
|
||||
\def\maxheight{\ifdim\Gin@nat@height>\textheight\textheight\else\Gin@nat@height\fi}
|
||||
\makeatother
|
||||
% Scale images if necessary, so that they will not overflow the page
|
||||
% margins by default, and it is still possible to overwrite the defaults
|
||||
% using explicit options in \includegraphics[width, height, ...]{}
|
||||
\setkeys{Gin}{width=\maxwidth,height=\maxheight,keepaspectratio}
|
||||
$endif$
|
||||
$if(links-as-notes)$
|
||||
% Make links footnotes instead of hotlinks:
|
||||
\renewcommand{\href}[2]{#2\footnote{\url{#1}}}
|
||||
$endif$
|
||||
$if(strikeout)$
|
||||
\usepackage[normalem]{ulem}
|
||||
% avoid problems with \sout in headers with hyperref:
|
||||
\pdfstringdefDisableCommands{\renewcommand{\sout}{}}
|
||||
$endif$
|
||||
$if(indent)$
|
||||
$else$
|
||||
\IfFileExists{parskip.sty}{%
|
||||
\usepackage{parskip}
|
||||
}{% else
|
||||
\setlength{\parindent}{0pt}
|
||||
\setlength{\parskip}{6pt plus 2pt minus 1pt}
|
||||
}
|
||||
$endif$
|
||||
\setlength{\emergencystretch}{3em} % prevent overfull lines
|
||||
\providecommand{\tightlist}{%
|
||||
\setlength{\itemsep}{0pt}\setlength{\parskip}{0pt}}
|
||||
$if(numbersections)$
|
||||
\setcounter{secnumdepth}{$if(secnumdepth)$$secnumdepth$$else$5$endif$}
|
||||
$else$
|
||||
\setcounter{secnumdepth}{0}
|
||||
$endif$
|
||||
$if(subparagraph)$
|
||||
$else$
|
||||
% Redefines (sub)paragraphs to behave more like sections
|
||||
\ifx\paragraph\undefined\else
|
||||
\let\oldparagraph\paragraph
|
||||
\renewcommand{\paragraph}[1]{\oldparagraph{#1}\mbox{}}
|
||||
\fi
|
||||
\ifx\subparagraph\undefined\else
|
||||
\let\oldsubparagraph\subparagraph
|
||||
\renewcommand{\subparagraph}[1]{\oldsubparagraph{#1}\mbox{}}
|
||||
\fi
|
||||
$endif$
|
||||
$if(dir)$
|
||||
\ifxetex
|
||||
% load bidi as late as possible as it modifies e.g. graphicx
|
||||
$if(latex-dir-rtl)$
|
||||
\usepackage[RTLdocument]{bidi}
|
||||
$else$
|
||||
\usepackage{bidi}
|
||||
$endif$
|
||||
\fi
|
||||
\ifnum 0\ifxetex 1\fi\ifluatex 1\fi=0 % if pdftex
|
||||
\TeXXeTstate=1
|
||||
\newcommand{\RL}[1]{\beginR #1\endR}
|
||||
\newcommand{\LR}[1]{\beginL #1\endL}
|
||||
\newenvironment{RTL}{\beginR}{\endR}
|
||||
\newenvironment{LTR}{\beginL}{\endL}
|
||||
\fi
|
||||
$endif$
|
||||
|
||||
% set default figure placement to htbp
|
||||
\makeatletter
|
||||
\def\fps@figure{htbp}
|
||||
\makeatother
|
||||
|
||||
$if(header-includes)$
|
||||
\usepackage{fancyhdr}
|
||||
\pagestyle{fancy}
|
||||
\fancyhead{}
|
||||
\fancyhead[RO,RE]{$head-content$}
|
||||
\fancyfoot[LO,LE]{$foot-content$}
|
||||
$endif$
|
||||
|
||||
$if(title)$
|
||||
\title{$title$$if(thanks)$\thanks{$thanks$}$endif$}
|
||||
$endif$
|
||||
$if(subtitle)$
|
||||
\providecommand{\subtitle}[1]{}
|
||||
\subtitle{$subtitle$}
|
||||
$endif$
|
||||
$if(author)$
|
||||
\author{$for(author)$$author$$sep$ \and $endfor$}
|
||||
$endif$
|
||||
$if(institute)$
|
||||
\providecommand{\institute}[1]{}
|
||||
\institute{$for(institute)$$institute$$sep$ \and $endfor$}
|
||||
$endif$
|
||||
\date{$date$}
|
||||
|
||||
\begin{document}
|
||||
$if(title)$
|
||||
\maketitle
|
||||
$endif$
|
||||
$if(abstract)$
|
||||
\begin{abstract}
|
||||
$abstract$
|
||||
\end{abstract}
|
||||
$endif$
|
||||
|
||||
$for(include-before)$
|
||||
$include-before$
|
||||
|
||||
$endfor$
|
||||
$if(toc)$
|
||||
{
|
||||
$if(colorlinks)$
|
||||
\hypersetup{linkcolor=$if(toccolor)$$toccolor$$else$black$endif$}
|
||||
$endif$
|
||||
\setcounter{tocdepth}{$toc-depth$}
|
||||
\tableofcontents
|
||||
}
|
||||
$endif$
|
||||
$if(lot)$
|
||||
\listoftables
|
||||
$endif$
|
||||
$if(lof)$
|
||||
\listoffigures
|
||||
$endif$
|
||||
$body$
|
||||
|
||||
$if(natbib)$
|
||||
$if(bibliography)$
|
||||
$if(biblio-title)$
|
||||
$if(book-class)$
|
||||
\renewcommand\bibname{$biblio-title$}
|
||||
$else$
|
||||
\renewcommand\refname{$biblio-title$}
|
||||
$endif$
|
||||
$endif$
|
||||
\bibliography{$for(bibliography)$$bibliography$$sep$,$endfor$}
|
||||
|
||||
$endif$
|
||||
$endif$
|
||||
$if(biblatex)$
|
||||
\printbibliography$if(biblio-title)$[title=$biblio-title$]$endif$
|
||||
|
||||
$endif$
|
||||
$for(include-after)$
|
||||
$include-after$
|
||||
|
||||
$endfor$
|
||||
\end{document}
|
224
example/templates/index.ace
Normal file
@ -0,0 +1,224 @@
|
||||
= doctype html
|
||||
html lang=en
|
||||
head
|
||||
meta charset=utf-8
|
||||
title {{.Project.Name}}
|
||||
link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bulma/0.6.2/css/bulma.min.css"
|
||||
link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bulmaswatch/0.6.2/sandstone/bulmaswatch.min.css"
|
||||
script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.22.0/moment.min.js"
|
||||
script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/later/1.2.0/later.min.js"
|
||||
script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/prettycron/0.11.0/prettycron.min.js"
|
||||
meta name="viewport" content="width=device-width, initial-scale=1"
|
||||
= css
|
||||
= javascript
|
||||
document.addEventListener("DOMContentLoaded", function(event) {
|
||||
document.querySelectorAll('.cron').forEach(function(el) {
|
||||
el.innerHTML = prettyCron.toString(el.innerHTML)
|
||||
})
|
||||
})
|
||||
|
||||
function show(name) {
|
||||
var items = document.getElementsByClassName('top-nav')
|
||||
for (var i=0; i<items.length; i++) {
|
||||
var item = items[i]
|
||||
if (item.tagName === "LI") {
|
||||
// navigation tabs
|
||||
if (item.classList.contains(name)) {
|
||||
item.classList.add('is-active')
|
||||
} else {
|
||||
item.classList.remove('is-active')
|
||||
}
|
||||
} else {
|
||||
// sections
|
||||
if (item.id === name) {
|
||||
item.classList.remove('is-hidden')
|
||||
} else {
|
||||
item.classList.add('is-hidden')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
body
|
||||
section.hero.is-primary.is-small
|
||||
.hero-body
|
||||
.container
|
||||
h1.title {{.Project.Name}}
|
||||
p.subtitle Policy, Procedure, and Audit Status
|
||||
.hero-foot
|
||||
nav.tabs.is-boxed.is-fullwidth
|
||||
.container
|
||||
ul.is-size-4
|
||||
li.top-nav.overview
|
||||
strong
|
||||
a onclick="javascript:show('overview')" Overview
|
||||
li.top-nav.narratives
|
||||
strong
|
||||
a onclick="javascript:show('narratives')" Narratives
|
||||
li.top-nav.policies
|
||||
strong
|
||||
a onclick="javascript:show('policies')" Policies
|
||||
li.top-nav.procedures
|
||||
strong
|
||||
a onclick="javascript:show('procedures')" Procedures
|
||||
li.top-nav.standards
|
||||
strong
|
||||
a onclick="javascript:show('standards')" Standards
|
||||
/ li.top-nav.evidence
|
||||
/ a onclick="javascript:show('evidence')" Evidence Vault
|
||||
#overview.section.top-nav.container.content
|
||||
blockquote
|
||||
h3 This site consolidates all documents related to the {{.Project.Name}}
|
||||
hr
|
||||
.columns.is-vcentered
|
||||
.column.is-one-third
|
||||
div
|
||||
p.subtitle.is-3.has-text-centered Control Tracking
|
||||
.column.has-text-centered
|
||||
div
|
||||
p.heading Satisfied Controls
|
||||
p.title
|
||||
{{.Stats.ControlsSatisfied}}
|
||||
.column.has-text-centered
|
||||
div
|
||||
p.heading Total Controls
|
||||
p.title
|
||||
{{.Stats.ControlsTotal}}
|
||||
.columns.is-vcentered
|
||||
.column.is-one-third
|
||||
div
|
||||
p.subtitle.is-3.has-text-centered Procedure Tracking
|
||||
.column.has-text-centered
|
||||
div
|
||||
p.heading Active Tickets
|
||||
p.title
|
||||
a target=_blank href="https://github.com/strongdm/comply/issues?q=is%3Aissue+is%3Aopen+label%3Acomply+label%3Aprocess"
|
||||
{{.Stats.ProcessOpen}}
|
||||
.column.has-text-centered
|
||||
div
|
||||
p.heading Oldest Ticket
|
||||
p.title
|
||||
a {{.Stats.ProcessOldestDays}} days
|
||||
.columns.is-vcentered
|
||||
.column.is-one-third
|
||||
div.has-text-centered
|
||||
p.subtitle.is-3 Audit Tracking
|
||||
.column.has-text-centered
|
||||
div
|
||||
p.heading Open Requests
|
||||
p.title
|
||||
a target=_blank href="https://github.com/strongdm/comply/issues?q=is%3Aissue+is%3Aopen+label%3Acomply+label%3Aaudit"
|
||||
{{.Stats.AuditOpen}}
|
||||
.column.has-text-centered
|
||||
div
|
||||
p.heading Total Requests
|
||||
p.title
|
||||
a target=_blank href="https://github.com/strongdm/comply/issues?q=is%3Aissue+is%3Aclosed+label%3Acomply+label%3Aaudit"
|
||||
{{.Stats.AuditTotal}}
|
||||
.columns.is-vcentered
|
||||
.column.is-one-third
|
||||
.column.is-two-thirds.has-text-centered
|
||||
/ progress.progress.is-primary value={{.Stats.AuditClosed}} max={{.Stats.AuditTotal}}
|
||||
#narratives.section.top-nav.container.content
|
||||
blockquote
|
||||
h3
|
||||
p
|
||||
strong Narratives
|
||||
| provide an overview of the organization and the compliance environment.
|
||||
table.table.is-size-4
|
||||
thead
|
||||
tr
|
||||
th Name
|
||||
th Acronym
|
||||
th PDF
|
||||
tbody
|
||||
{{range .Narratives }}
|
||||
tr
|
||||
td {{.Name}}
|
||||
td {{.Acronym}}
|
||||
td
|
||||
a href={{.OutputFilename}} target=_blank
|
||||
{{.OutputFilename}}
|
||||
{{end}}
|
||||
#policies.section.top-nav.container.content
|
||||
blockquote
|
||||
h3
|
||||
p
|
||||
strong Policies
|
||||
| govern the behavior of {{.Project.OrganizationName}} employees and contractors.
|
||||
table.table.is-size-4
|
||||
thead
|
||||
tr
|
||||
th Name
|
||||
th Acronym
|
||||
th PDF
|
||||
tbody
|
||||
{{range .Policies }}
|
||||
tr
|
||||
td {{.Name}}
|
||||
td {{.Acronym}}
|
||||
td
|
||||
a href={{.OutputFilename}} target=_blank
|
||||
{{.OutputFilename}}
|
||||
{{end}}
|
||||
#procedures.section.top-nav.container.content
|
||||
blockquote
|
||||
h3
|
||||
p
|
||||
strong Procedures
|
||||
| prescribe specific steps that are taken in response to key events.
|
||||
table.table.is-size-4
|
||||
thead
|
||||
tr
|
||||
th Name
|
||||
th ID
|
||||
th Schedule (cron format)
|
||||
tbody
|
||||
{{range .Procedures }}
|
||||
tr
|
||||
td {{.Name}}
|
||||
td {{.ID}}
|
||||
{{if .Cron}}
|
||||
td.cron {{.Cron}}
|
||||
{{else}}
|
||||
td On demand
|
||||
{{end}}
|
||||
{{end}}
|
||||
#standards.section.top-nav.container.content
|
||||
blockquote
|
||||
h3
|
||||
p
|
||||
strong Standards
|
||||
| specify the controls satisfied by the compliance program.
|
||||
table.table.is-size-4.is-fullwidth
|
||||
thead
|
||||
tr
|
||||
th Control Key
|
||||
th Name
|
||||
th Satisfied?
|
||||
th Satisfied By
|
||||
tbody
|
||||
{{range .Controls }}
|
||||
tr
|
||||
td {{.ControlKey}}
|
||||
td
|
||||
strong {{.Name}}
|
||||
.subtitle {{.Description}}
|
||||
{{if .Satisfied}}
|
||||
td.is-success Yes
|
||||
{{else}}
|
||||
td No
|
||||
{{end}}
|
||||
td
|
||||
{{range .SatisfiedBy}}
|
||||
a.is-size-7 href={{.}} target=_blank
|
||||
{{.}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
|
||||
footer.footer
|
||||
.container
|
||||
.content.has-text-centered
|
||||
p {{.Project.OrganizationName}} Confidential 2018
|
||||
= javascript
|
||||
// show the overview tab by default (comment out during development)
|
||||
show('overview')
|
116
internal/cli/app.go
Normal file
@ -0,0 +1,116 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/docker/docker/api/types"
|
||||
"github.com/docker/docker/client"
|
||||
"github.com/strongdm/comply/internal/config"
|
||||
"github.com/strongdm/comply/internal/plugin/github"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
// Version is set by the build system.
|
||||
const Version = "0.0.0-development"
|
||||
|
||||
// Main should be invoked by the main function in the main package.
|
||||
func Main() {
|
||||
err := newApp().Run(os.Args)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func newApp() *cli.App {
|
||||
app := cli.NewApp()
|
||||
app.Name = "comply"
|
||||
app.HideVersion = true
|
||||
app.Version = Version
|
||||
app.Usage = "policy compliance toolkit"
|
||||
|
||||
app.Commands = []cli.Command{
|
||||
initCommand,
|
||||
}
|
||||
|
||||
app.Commands = append(app.Commands, beforeCommand(buildCommand, projectMustExist))
|
||||
app.Commands = append(app.Commands, beforeCommand(schedulerCommand, projectMustExist))
|
||||
app.Commands = append(app.Commands, beforeCommand(serveCommand, projectMustExist))
|
||||
app.Commands = append(app.Commands, beforeCommand(syncCommand, projectMustExist))
|
||||
app.Commands = append(app.Commands, beforeCommand(todoCommand, projectMustExist))
|
||||
|
||||
// Plugins
|
||||
github.Register()
|
||||
|
||||
return app
|
||||
}
|
||||
|
||||
func beforeCommand(c cli.Command, bf ...cli.BeforeFunc) cli.Command {
|
||||
c.Before = beforeAll(bf...)
|
||||
return c
|
||||
}
|
||||
|
||||
func beforeAll(bf ...cli.BeforeFunc) cli.BeforeFunc {
|
||||
return func(c *cli.Context) error {
|
||||
for _, f := range bf {
|
||||
if err := f(c); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func feedbackError(message string) error {
|
||||
return errors.New(fmt.Sprintf("\n\nERROR\n=====\n%s\n", message))
|
||||
}
|
||||
|
||||
func projectMustExist(c *cli.Context) error {
|
||||
_, err := ioutil.ReadFile(filepath.Join(config.ProjectRoot(), "comply.yml"))
|
||||
if err != nil {
|
||||
return feedbackError("command must be run from the root of a valid comply project (comply.yml must exist; have you run `comply init`?)")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func dockerMustExist(c *cli.Context) error {
|
||||
dockerErr := fmt.Errorf("Docker must be available in order to run `%s`", c.Command.Name)
|
||||
|
||||
ctx := context.Background()
|
||||
cli, err := client.NewEnvClient()
|
||||
if err != nil {
|
||||
return dockerErr
|
||||
}
|
||||
|
||||
r, err := cli.ImagePull(ctx, "strongdm/pandoc:latest", types.ImagePullOptions{})
|
||||
if err != nil {
|
||||
return dockerErr
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
done := make(chan struct{})
|
||||
defer close(done)
|
||||
|
||||
go func() {
|
||||
// if docker IO takes more than N seconds, notify user we're (likely) downloading the pandoc image
|
||||
longishPull := time.After(time.Second * 6)
|
||||
select {
|
||||
case <-longishPull:
|
||||
fmt.Println("Downloading strongdm/pandoc image (this may take sometime) ...")
|
||||
case <-done:
|
||||
// in this case, the docker pull was quick -- suggesting we already have the container
|
||||
}
|
||||
}()
|
||||
|
||||
// hold function open until all docker IO is complete
|
||||
io.Copy(ioutil.Discard, r)
|
||||
|
||||
return nil
|
||||
}
|
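As the comment on Main notes, this package is meant to be driven by a thin main package. A minimal sketch of such an entry point, assuming a top-level main package that is not part of this commit:

package main

import "github.com/strongdm/comply/internal/cli"

// main delegates argument parsing and command dispatch to the cli package.
func main() {
	cli.Main()
}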
29
internal/cli/build.go
Normal file
@ -0,0 +1,29 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/strongdm/comply/internal/render"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
var buildCommand = cli.Command{
|
||||
Name: "build",
|
||||
ShortName: "b",
|
||||
Usage: "generate a static website summarizing the compliance program",
|
||||
Flags: []cli.Flag{
|
||||
cli.BoolFlag{
|
||||
Name: "live, l",
|
||||
Usage: "rebuild static site after filesystem changes",
|
||||
},
|
||||
},
|
||||
Action: buildAction,
|
||||
Before: beforeAll(dockerMustExist),
|
||||
}
|
||||
|
||||
func buildAction(c *cli.Context) error {
|
||||
err := render.Build("output", false)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "build failed")
|
||||
}
|
||||
return nil
|
||||
}
|
4
internal/cli/doc.go
Normal file
@ -0,0 +1,4 @@
|
||||
/*
|
||||
Package cli defines comply commands and arguments.
|
||||
*/
|
||||
package cli
|
153
internal/cli/init.go
Normal file
@ -0,0 +1,153 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/manifoldco/promptui"
|
||||
"github.com/strongdm/comply/internal/config"
|
||||
"github.com/strongdm/comply/internal/model"
|
||||
"github.com/strongdm/comply/internal/theme"
|
||||
"github.com/urfave/cli"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var initCommand = cli.Command{
|
||||
Name: "init",
|
||||
Usage: "initialize a new compliance repository (interactive)",
|
||||
Action: initAction,
|
||||
}
|
||||
|
||||
func initAction(c *cli.Context) error {
|
||||
fi, _ := ioutil.ReadDir(config.ProjectRoot())
|
||||
if len(fi) > 0 {
|
||||
return errors.New("init must be run from an empty directory")
|
||||
}
|
||||
|
||||
atLeast := func(n int) func(string) error {
|
||||
return func(input string) error {
|
||||
if len(input) < n {
|
||||
return errors.New("Too short")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
noSpaces := func(s string) error {
|
||||
if strings.ContainsAny(s, "\n\t ") {
|
||||
return errors.New("Must not contain spaces")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
prompt := promptui.Prompt{
|
||||
Label: "Organization Name",
|
||||
Validate: atLeast(1),
|
||||
}
|
||||
|
||||
name, err := prompt.Run()
|
||||
if err != nil {
|
||||
fmt.Printf("Prompt failed %v\n", err)
|
||||
return err
|
||||
}
|
||||
|
||||
prompt = promptui.Prompt{
|
||||
Label: "PDF Filename Prefix",
|
||||
Default: strings.Split(name, " ")[0],
|
||||
Validate: noSpaces,
|
||||
}
|
||||
|
||||
prefix, err := prompt.Run()
|
||||
if err != nil {
|
||||
fmt.Printf("Prompt failed %v\n", err)
|
||||
return err
|
||||
}
|
||||
|
||||
chooser := promptui.Select{
|
||||
Label: "Compliance Templates",
|
||||
Items: []string{"SOC2", "Blank"},
|
||||
}
|
||||
|
||||
choice, _, err := chooser.Run()
|
||||
if err != nil {
|
||||
fmt.Printf("Prompt failed %v\n", err)
|
||||
return err
|
||||
}
|
||||
|
||||
themeName := "comply-blank"
|
||||
switch choice {
|
||||
case 0:
|
||||
themeName = "comply-soc2"
|
||||
case 1:
|
||||
themeName = "comply-blank"
|
||||
default:
|
||||
panic("unrecognized selection")
|
||||
}
|
||||
|
||||
chooser = promptui.Select{
|
||||
Label: "Ticket System",
|
||||
Items: []string{"Github", "JIRA"},
|
||||
}
|
||||
|
||||
choice, _, err = chooser.Run()
|
||||
if err != nil {
|
||||
fmt.Printf("Prompt failed %v\n", err)
|
||||
return err
|
||||
}
|
||||
|
||||
ticketing := model.Github
|
||||
|
||||
switch choice {
|
||||
case 0:
|
||||
ticketing = model.Github
|
||||
case 1:
|
||||
ticketing = model.JIRA
|
||||
default:
|
||||
panic("unrecognized selection")
|
||||
}
|
||||
|
||||
ticketConfig := make(map[string]string)
|
||||
|
||||
plugin := model.GetPlugin(ticketing)
|
||||
ticketPrompts := plugin.Prompts()
|
||||
for k, prompt := range ticketPrompts {
|
||||
p := promptui.Prompt{
|
||||
Label: prompt,
|
||||
Validate: atLeast(2),
|
||||
}
|
||||
|
||||
v, err := p.Run()
|
||||
if err != nil {
|
||||
fmt.Printf("Prompt failed: %v\n", err)
|
||||
return err
|
||||
}
|
||||
ticketConfig[k] = v
|
||||
}
|
||||
|
||||
p := config.Project{}
|
||||
p.Name = name
|
||||
p.FilePrefix = prefix
|
||||
p.Tickets = make(map[string]interface{})
|
||||
p.Tickets[string(ticketing)] = ticketConfig
|
||||
|
||||
x, _ := yaml.Marshal(&p)
|
||||
err = ioutil.WriteFile(filepath.Join(config.ProjectRoot(), "comply.yml"), x, os.FileMode(0644))
|
||||
if err != nil {
|
||||
return cli.NewExitError(err, 1)
|
||||
}
|
||||
|
||||
err = theme.SaveTo(themeName, config.ProjectRoot())
|
||||
if err != nil {
|
||||
return cli.NewExitError(err, 1)
|
||||
}
|
||||
|
||||
success := fmt.Sprintf("%s Compliance initialized successfully", name)
|
||||
fmt.Println(strings.Repeat("=", len(success)+2))
|
||||
fmt.Printf("%s %s\n", promptui.IconGood, success)
|
||||
|
||||
return nil
|
||||
}
|
21
internal/cli/scheduler.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"github.com/strongdm/comply/internal/ticket"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
var schedulerCommand = cli.Command{
|
||||
Name: "scheduler",
|
||||
Usage: "create tickets based on procedure schedule",
|
||||
Action: schedulerAction,
|
||||
Before: projectMustExist,
|
||||
}
|
||||
|
||||
func schedulerAction(c *cli.Context) error {
|
||||
err := syncAction(c)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return ticket.TriggerScheduled()
|
||||
}
|
21
internal/cli/serve.go
Normal file
@ -0,0 +1,21 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"github.com/strongdm/comply/internal/render"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
var serveCommand = cli.Command{
|
||||
Name: "serve",
|
||||
Usage: "live updating version of the build command",
|
||||
Action: serveAction,
|
||||
Before: beforeAll(dockerMustExist),
|
||||
}
|
||||
|
||||
func serveAction(c *cli.Context) error {
|
||||
err := render.Build("output", true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
28
internal/cli/sync.go
Normal file
@ -0,0 +1,28 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"github.com/strongdm/comply/internal/model"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
var syncCommand = cli.Command{
|
||||
Name: "sync",
|
||||
Usage: "sync ticket status to local cache",
|
||||
Action: syncAction,
|
||||
Before: projectMustExist,
|
||||
}
|
||||
|
||||
func syncAction(c *cli.Context) error {
|
||||
tp := model.GetPlugin(model.Github)
|
||||
tickets, err := tp.FindByTagName("comply")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, t := range tickets {
|
||||
err = model.DB().Write("tickets", t.ID, t)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
68
internal/cli/todo.go
Normal file
@ -0,0 +1,68 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"os"
|
||||
"sort"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/strongdm/comply/internal/model"
|
||||
"github.com/urfave/cli"
|
||||
)
|
||||
|
||||
var todoCommand = cli.Command{
|
||||
Name: "todo",
|
||||
Usage: "list declared vs satisfied compliance controls",
|
||||
Action: todoAction,
|
||||
Before: projectMustExist,
|
||||
}
|
||||
|
||||
func todoAction(c *cli.Context) error {
|
||||
d, err := model.ReadData()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
w := tablewriter.NewWriter(os.Stdout)
|
||||
w.SetHeader([]string{"Standard", "Control", "Satisfied?", "Name"})
|
||||
|
||||
type row struct {
|
||||
standard string
|
||||
controlKey string
|
||||
satisfied string
|
||||
controlName string
|
||||
}
|
||||
|
||||
satisfied := model.ControlsSatisfied(d)
|
||||
|
||||
var rows []row
|
||||
for _, std := range d.Standards {
|
||||
for id, c := range std.Controls {
|
||||
sat := "NO"
|
||||
if _, ok := satisfied[id]; ok {
|
||||
sat = color.GreenString("YES")
|
||||
}
|
||||
|
||||
rows = append(rows, row{
|
||||
standard: std.Name,
|
||||
controlKey: id,
|
||||
satisfied: sat,
|
||||
controlName: c.Name,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
sort.Slice(rows, func(i, j int) bool {
|
||||
return rows[i].controlKey < rows[j].controlKey
|
||||
})
|
||||
|
||||
w.SetAutoWrapText(false)
|
||||
|
||||
for _, r := range rows {
|
||||
w.Append([]string{r.standard, r.controlKey, r.satisfied, r.controlName})
|
||||
}
|
||||
|
||||
w.Render()
|
||||
|
||||
return nil
|
||||
}
|
66
internal/config/config.go
Normal file
@ -0,0 +1,66 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var projectRoot string
|
||||
|
||||
// SetProjectRoot is used by the test suite.
|
||||
func SetProjectRoot(dir string) {
|
||||
projectRoot = dir
|
||||
}
|
||||
|
||||
type Project struct {
|
||||
Name string `yaml:"name"`
|
||||
FilePrefix string `yaml:"filePrefix"`
|
||||
Tickets map[string]interface{} `yaml:"tickets"`
|
||||
}
|
||||
|
||||
// YAML is the parsed contents of ProjectRoot()/comply.yml.
|
||||
func YAML() map[interface{}]interface{} {
|
||||
m := make(map[interface{}]interface{})
|
||||
cfgBytes, err := ioutil.ReadFile(filepath.Join(ProjectRoot(), "comply.yml"))
|
||||
if err != nil {
|
||||
panic("unable to load config.yml: " + err.Error())
|
||||
}
|
||||
yaml.Unmarshal(cfgBytes, &m)
|
||||
return m
|
||||
}
|
||||
|
||||
// Exists tests for the presence of a comply configuration file.
|
||||
func Exists() bool {
|
||||
_, err := ioutil.ReadFile(filepath.Join(ProjectRoot(), "comply.yml"))
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Config is the parsed contents of ProjectRoot()/comply.yml.
|
||||
func Config() Project {
|
||||
p := Project{}
|
||||
cfgBytes, err := ioutil.ReadFile(filepath.Join(ProjectRoot(), "comply.yml"))
|
||||
if err != nil {
|
||||
panic("unable to load config.yml: " + err.Error())
|
||||
}
|
||||
yaml.Unmarshal(cfgBytes, &p)
|
||||
return p
|
||||
}
|
||||
|
||||
// ProjectRoot is the fully-qualified path to the comply project root directory.
|
||||
func ProjectRoot() string {
|
||||
if projectRoot == "" {
|
||||
dir, err := os.Getwd()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
projectRoot = dir
|
||||
}
|
||||
|
||||
return projectRoot
|
||||
}
|
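The yaml struct tags on Project imply the shape of comply.yml. A minimal sketch that renders a hypothetical Project to YAML with the same gopkg.in/yaml.v2 library used above; the organization name, prefix, and GitHub values are placeholders, not taken from this commit:

package main

import (
	"fmt"

	"github.com/strongdm/comply/internal/config"
	"gopkg.in/yaml.v2"
)

func main() {
	// Hypothetical project; the "github" keys mirror the token/username/repo prompts of the GitHub plugin.
	p := config.Project{
		Name:       "Acme Compliance Program",
		FilePrefix: "Acme",
		Tickets: map[string]interface{}{
			"github": map[string]string{"token": "...", "username": "acme", "repo": "compliance"},
		},
	}
	out, err := yaml.Marshal(&p)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // prints the name:, filePrefix:, and tickets: keys
}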
4
internal/config/doc.go
Normal file
@ -0,0 +1,4 @@
|
||||
/*
|
||||
Package config provides access to the comply.yml file.
|
||||
*/
|
||||
package config
|
6
internal/model/audit.go
Normal file
@ -0,0 +1,6 @@
|
||||
package model
|
||||
|
||||
type Audit struct {
|
||||
ID string
|
||||
Name string
|
||||
}
|
36
internal/model/db.go
Normal file
@ -0,0 +1,36 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
|
||||
"github.com/nanobox-io/golang-scribble"
|
||||
"github.com/strongdm/comply/internal/config"
|
||||
)
|
||||
|
||||
var dbSingletonOnce sync.Once
|
||||
var dbSingleton *scribble.Driver
|
||||
|
||||
// DB provides a singleton reference to a local json cache; will panic if storage location is not writeable.
|
||||
func DB() *scribble.Driver {
|
||||
dbSingletonOnce.Do(func() {
|
||||
if _, err := os.Stat(filepath.Join(config.ProjectRoot(), ".comply", "cache")); os.IsNotExist(err) {
|
||||
err = os.Mkdir(filepath.Join(config.ProjectRoot(), ".comply"), os.FileMode(0755))
|
||||
if err != nil {
|
||||
panic("could not create directory .comply: " + err.Error())
|
||||
}
|
||||
err = os.Mkdir(filepath.Join(config.ProjectRoot(), ".comply", "cache"), os.FileMode(0755))
|
||||
if err != nil {
|
||||
panic("could not create directory .comply/cache: " + err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
db, err := scribble.New(filepath.Join(config.ProjectRoot(), ".comply", "cache"), nil)
|
||||
if err != nil {
|
||||
panic("unable to load comply data: " + err.Error())
|
||||
}
|
||||
dbSingleton = db
|
||||
})
|
||||
return dbSingleton
|
||||
}
|
35
internal/model/db_test.go
Normal file
@ -0,0 +1,35 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/strongdm/comply/internal/config"
|
||||
)
|
||||
|
||||
func TestSaveGet(t *testing.T) {
|
||||
dir := os.TempDir()
|
||||
config.SetProjectRoot(dir)
|
||||
f, err := os.Create(filepath.Join(dir, "config.yml"))
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
f.Close()
|
||||
|
||||
name := "Do something excellent"
|
||||
err = DB().Write("tickets", "100", &Ticket{ID: "100", Name: name})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
ticket := &Ticket{}
|
||||
err = DB().Read("tickets", "100", ticket)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if ticket.Name != name {
|
||||
t.Error("failed to read ticket")
|
||||
}
|
||||
}
|
14
internal/model/doc.go
Normal file
@ -0,0 +1,14 @@
|
||||
/*
|
||||
Package model defines the comply data model.
|
||||
|
||||
Markdown Wrappers
|
||||
|
||||
The model package treats typed markdown files as model objects. All wrapped markdown documents are assumed to have a YAML header and a markdown body separated by three dashes: "---".
|
||||
|
||||
Local Ticket Cache
|
||||
|
||||
Tickets are defined externally (in the configured ticketing system), and cached locally for rapid dashboard rendering.
|
||||
|
||||
|
||||
*/
|
||||
package model
|
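To make the wrapper format concrete, here is a sketch of a minimal wrapped document and the header/body split it implies, using the same three-dash separator described above (the field values are illustrative only):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// YAML header, then "---", then the markdown body.
	doc := "name: Example Policy\nacronym: EP\n---\n# Example Policy\n\nBody text.\n"

	parts := strings.Split(doc, "---")
	header := parts[0]
	body := strings.Join(parts[1:], "---")
	fmt.Print("header:\n", header, "body:\n", body)
}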
188
internal/model/fs.go
Normal file
@ -0,0 +1,188 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/strongdm/comply/internal/config"
|
||||
"github.com/strongdm/comply/internal/path"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// ReadData loads all records from both the filesystem and ticket cache.
|
||||
func ReadData() (*Data, error) {
|
||||
tickets, err := ReadTickets()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
narratives, err := ReadNarratives()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
policies, err := ReadPolicies()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
procedures, err := ReadProcedures()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
standards, err := ReadStandards()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &Data{
|
||||
Tickets: tickets,
|
||||
Narratives: narratives,
|
||||
Policies: policies,
|
||||
Procedures: procedures,
|
||||
Standards: standards,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// ReadTickets returns all known tickets, or an empty list in the event the ticket cache is empty or unavailable.
|
||||
func ReadTickets() ([]*Ticket, error) {
|
||||
rt, err := DB().ReadAll("tickets")
|
||||
if err != nil {
|
||||
// empty list
|
||||
return []*Ticket{}, nil
|
||||
}
|
||||
return tickets(rt)
|
||||
}
|
||||
|
||||
func tickets(rawTickets []string) ([]*Ticket, error) {
|
||||
var tickets []*Ticket
|
||||
for _, rt := range rawTickets {
|
||||
t := &Ticket{}
|
||||
err := json.Unmarshal([]byte(rt), t)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "malformed ticket JSON")
|
||||
}
|
||||
tickets = append(tickets, t)
|
||||
}
|
||||
return tickets, nil
|
||||
}
|
||||
|
||||
// ReadStandards loads standard definitions from the filesystem.
|
||||
func ReadStandards() ([]*Standard, error) {
|
||||
var standards []*Standard
|
||||
|
||||
files, err := path.Standards()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to enumerate paths")
|
||||
}
|
||||
|
||||
for _, f := range files {
|
||||
s := &Standard{}
|
||||
sBytes, err := ioutil.ReadFile(f.FullPath)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to read "+f.FullPath)
|
||||
}
|
||||
|
||||
yaml.Unmarshal(sBytes, &s)
|
||||
standards = append(standards, s)
|
||||
}
|
||||
|
||||
return standards, nil
|
||||
}
|
||||
|
||||
// ReadNarratives loads narrative descriptions from the filesystem.
|
||||
func ReadNarratives() ([]*Narrative, error) {
|
||||
var narratives []*Narrative
|
||||
|
||||
files, err := path.Narratives()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to enumerate paths")
|
||||
}
|
||||
|
||||
for _, f := range files {
|
||||
n := &Narrative{}
|
||||
mdmd := loadMDMD(f.FullPath)
|
||||
err = yaml.Unmarshal([]byte(mdmd.yaml), &n)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to parse "+f.FullPath)
|
||||
}
|
||||
n.Body = mdmd.body
|
||||
n.FullPath = f.FullPath
|
||||
n.ModifiedAt = f.Info.ModTime()
|
||||
n.OutputFilename = fmt.Sprintf("%s-%s.pdf", config.Config().FilePrefix, n.Acronym)
|
||||
narratives = append(narratives, n)
|
||||
}
|
||||
|
||||
return narratives, nil
|
||||
}
|
||||
|
||||
// ReadProcedures loads procedure descriptions from the filesystem.
|
||||
func ReadProcedures() ([]*Procedure, error) {
|
||||
var procedures []*Procedure
|
||||
files, err := path.Procedures()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to enumerate paths")
|
||||
}
|
||||
|
||||
for _, f := range files {
|
||||
p := &Procedure{}
|
||||
mdmd := loadMDMD(f.FullPath)
|
||||
err = yaml.Unmarshal([]byte(mdmd.yaml), &p)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to parse "+f.FullPath)
|
||||
}
|
||||
p.Body = mdmd.body
|
||||
p.FullPath = f.FullPath
|
||||
p.ModifiedAt = f.Info.ModTime()
|
||||
procedures = append(procedures, p)
|
||||
}
|
||||
|
||||
return procedures, nil
|
||||
}
|
||||
|
||||
// ReadPolicies loads policy documents from the filesystem.
|
||||
func ReadPolicies() ([]*Policy, error) {
|
||||
var policies []*Policy
|
||||
|
||||
files, err := path.Policies()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to enumerate paths")
|
||||
}
|
||||
|
||||
for _, f := range files {
|
||||
p := &Policy{}
|
||||
mdmd := loadMDMD(f.FullPath)
|
||||
err = yaml.Unmarshal([]byte(mdmd.yaml), &p)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to parse "+f.FullPath)
|
||||
}
|
||||
p.Body = mdmd.body
|
||||
p.FullPath = f.FullPath
|
||||
p.ModifiedAt = f.Info.ModTime()
|
||||
p.OutputFilename = fmt.Sprintf("%s-%s.pdf", config.Config().FilePrefix, p.Acronym)
|
||||
policies = append(policies, p)
|
||||
}
|
||||
|
||||
return policies, nil
|
||||
}
|
||||
|
||||
type metadataMarkdown struct {
|
||||
yaml string
|
||||
body string
|
||||
}
|
||||
|
||||
func loadMDMD(path string) metadataMarkdown {
|
||||
bytes, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
content := string(bytes)
|
||||
components := strings.Split(content, "---")
|
||||
if len(components) == 1 {
|
||||
panic(fmt.Sprintf("Malformed metadata markdown in %s, must be of the form: YAML\\n---\\nmarkdown content", path))
|
||||
}
|
||||
yaml := components[0]
|
||||
body := strings.Join(components[1:], "---")
|
||||
return metadataMarkdown{yaml, body}
|
||||
}
|
17
internal/model/model.go
Normal file
@ -0,0 +1,17 @@
|
||||
package model
|
||||
|
||||
type Data struct {
|
||||
Standards []*Standard
|
||||
Narratives []*Narrative
|
||||
Policies []*Policy
|
||||
Procedures []*Procedure
|
||||
Tickets []*Ticket
|
||||
Audits []*Audit
|
||||
}
|
||||
|
||||
type Revision struct {
|
||||
Date string `yaml:"date"`
|
||||
Comment string `yaml:"comment"`
|
||||
}
|
||||
|
||||
type Satisfaction map[string][]string
|
44
internal/model/model_test.go
Normal file
@ -0,0 +1,44 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMarshal(t *testing.T) {
|
||||
d := Data{
|
||||
Tickets: []*Ticket{
|
||||
&Ticket{
|
||||
ID: "t1",
|
||||
},
|
||||
},
|
||||
Audits: []*Audit{
|
||||
&Audit{
|
||||
ID: "a1",
|
||||
},
|
||||
},
|
||||
Procedures: []*Procedure{
|
||||
&Procedure{
|
||||
Code: "pro1",
|
||||
},
|
||||
},
|
||||
Policies: []*Policy{
|
||||
&Policy{
|
||||
Name: "pol1",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
m, _ := json.Marshal(d)
|
||||
|
||||
encoded := string(m)
|
||||
|
||||
if !strings.Contains(encoded, "t1") ||
|
||||
!strings.Contains(encoded, "a1") ||
|
||||
!strings.Contains(encoded, "pro1") ||
|
||||
!strings.Contains(encoded, "pol1") {
|
||||
t.Error("identifier not found in marshalled string")
|
||||
}
|
||||
|
||||
}
|
15
internal/model/narrative.go
Normal file
@ -0,0 +1,15 @@
|
||||
package model
|
||||
|
||||
import "time"
|
||||
|
||||
type Narrative struct {
|
||||
Name string `yaml:"name"`
|
||||
Acronym string `yaml:"acronym"`
|
||||
|
||||
Revisions []Revision `yaml:"majorRevisions"`
|
||||
Satisfies Satisfaction `yaml:"satisfies"`
|
||||
FullPath string
|
||||
OutputFilename string
|
||||
ModifiedAt time.Time
|
||||
Body string
|
||||
}
|
89
internal/model/plugin.go
Normal file
@ -0,0 +1,89 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"github.com/strongdm/comply/internal/config"
|
||||
)
|
||||
|
||||
var tsPluginsMu sync.Mutex
|
||||
var tsPlugins = make(map[TicketSystem]TicketPlugin)
|
||||
var tsConfigureOnce sync.Once
|
||||
|
||||
// TicketSystem is the type of ticket database.
|
||||
type TicketSystem string
|
||||
|
||||
const (
|
||||
// JIRA from Atlassian.
|
||||
JIRA = TicketSystem("jira")
|
||||
// Github from GitHub.
|
||||
Github = TicketSystem("github")
|
||||
)
|
||||
|
||||
// TicketPlugin models support for ticketing systems.
|
||||
type TicketPlugin interface {
|
||||
Get(ID string) (*Ticket, error)
|
||||
FindOpen() ([]*Ticket, error)
|
||||
FindByTag(name, value string) ([]*Ticket, error)
|
||||
FindByTagName(name string) ([]*Ticket, error)
|
||||
Create(ticket *Ticket, labels []string) error
|
||||
Configure(map[string]interface{}) error
|
||||
Prompts() map[string]string
|
||||
}
|
||||
|
||||
// GetPlugin loads the ticketing database.
|
||||
func GetPlugin(ts TicketSystem) TicketPlugin {
|
||||
tsPluginsMu.Lock()
|
||||
defer tsPluginsMu.Unlock()
|
||||
|
||||
tp, ok := tsPlugins[ts]
|
||||
if !ok {
|
||||
panic("Unknown ticket system: " + ts)
|
||||
}
|
||||
|
||||
if config.Exists() {
|
||||
tsConfigureOnce.Do(func() {
|
||||
ticketsMap := config.Config().Tickets
|
||||
|
||||
cfg, ok := ticketsMap[string(ts)]
|
||||
if !ok {
|
||||
spew.Dump(cfg)
|
||||
panic(fmt.Sprintf("Missing configuration for plugin system; add `%s` block to project YAML", string(ts)))
|
||||
}
|
||||
|
||||
cfgTyped, ok := cfg.(map[interface{}]interface{})
|
||||
if !ok {
|
||||
spew.Dump(cfg)
|
||||
panic(fmt.Sprintf("malformatted ticket configuration block `%s` in project YAML", string(ts)))
|
||||
}
|
||||
|
||||
cfgStringed := make(map[string]interface{})
|
||||
for k, v := range cfgTyped {
|
||||
kS, ok := k.(string)
|
||||
if !ok {
|
||||
spew.Dump(cfgStringed)
|
||||
panic(fmt.Sprintf("malformatted key in configuration block `%s` in project YAML", string(ts)))
|
||||
}
|
||||
cfgStringed[kS] = v
|
||||
}
|
||||
|
||||
tp.Configure(cfgStringed)
|
||||
})
|
||||
}
|
||||
|
||||
return tp
|
||||
}
|
||||
|
||||
// Register ticketing system plugin.
|
||||
func Register(ts TicketSystem, plugin TicketPlugin) {
|
||||
tsPluginsMu.Lock()
|
||||
defer tsPluginsMu.Unlock()
|
||||
_, ok := tsPlugins[ts]
|
||||
if ok {
|
||||
panic("Duplicate ticketing system registration: " + ts)
|
||||
}
|
||||
|
||||
tsPlugins[ts] = plugin
|
||||
}
|
15
internal/model/policy.go
Normal file
@ -0,0 +1,15 @@
|
||||
package model
|
||||
|
||||
import "time"
|
||||
|
||||
type Policy struct {
|
||||
Name string `yaml:"name"`
|
||||
Acronym string `yaml:"acronym"`
|
||||
|
||||
Revisions []Revision `yaml:"majorRevisions"`
|
||||
Satisfies Satisfaction `yaml:"satisfies"`
|
||||
FullPath string
|
||||
OutputFilename string
|
||||
ModifiedAt time.Time
|
||||
Body string
|
||||
}
|
16
internal/model/procedure.go
Normal file
@ -0,0 +1,16 @@
|
||||
package model
|
||||
|
||||
import "time"
|
||||
|
||||
type Procedure struct {
|
||||
Name string `yaml:"name"`
|
||||
ID string `yaml:"id"`
|
||||
Cron string `yaml:"cron"`
|
||||
|
||||
Revisions []Revision `yaml:"majorRevisions"`
|
||||
Satisfies Satisfaction `yaml:"satisfies"`
|
||||
FullPath string
|
||||
OutputFilename string
|
||||
ModifiedAt time.Time
|
||||
Body string
|
||||
}
|
49
internal/model/standard.go
Normal file
@ -0,0 +1,49 @@
|
||||
package model
|
||||
|
||||
type Control struct {
|
||||
Family string `yaml:"family"`
|
||||
Name string `yaml:"name"`
|
||||
Description string `yaml:"description"`
|
||||
}
|
||||
|
||||
type Standard struct {
|
||||
Name string `yaml:"name"`
|
||||
Controls map[string]Control `yaml:",inline"`
|
||||
}
|
||||
|
||||
// ControlsSatisfied determines the unique controls currently satisfied by all Narratives, Policies, and Procedures
|
||||
func ControlsSatisfied(data *Data) map[string][]string {
|
||||
satisfied := make(map[string][]string)
|
||||
|
||||
appendSatisfaction := func(in map[string][]string, k string, v string) []string {
|
||||
s, ok := in[k]
|
||||
if !ok {
|
||||
s = make([]string, 0)
|
||||
}
|
||||
s = append(s, v)
|
||||
return s
|
||||
}
|
||||
|
||||
for _, n := range data.Narratives {
|
||||
for _, controlKeys := range n.Satisfies {
|
||||
for _, key := range controlKeys {
|
||||
satisfied[key] = appendSatisfaction(satisfied, key, n.OutputFilename)
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, n := range data.Policies {
|
||||
for _, controlKeys := range n.Satisfies {
|
||||
for _, key := range controlKeys {
|
||||
satisfied[key] = appendSatisfaction(satisfied, key, n.OutputFilename)
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, n := range data.Procedures {
|
||||
for _, controlKeys := range n.Satisfies {
|
||||
for _, key := range controlKeys {
|
||||
satisfied[key] = appendSatisfaction(satisfied, key, n.OutputFilename)
|
||||
}
|
||||
}
|
||||
}
|
||||
return satisfied
|
||||
}
|
65
internal/model/ticket.go
Normal file
@ -0,0 +1,65 @@
|
||||
package model
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type TicketState string
|
||||
|
||||
const (
|
||||
Open = TicketState("open")
|
||||
Closed = TicketState("closed")
|
||||
)
|
||||
|
||||
type Ticket struct {
|
||||
ID string
|
||||
Name string
|
||||
State TicketState
|
||||
Body string
|
||||
Attributes map[string]interface{}
|
||||
ClosedAt *time.Time
|
||||
CreatedAt *time.Time
|
||||
UpdatedAt *time.Time
|
||||
}
|
||||
|
||||
func (t *Ticket) ProcedureID() string {
|
||||
md := t.metadata()
|
||||
if v, ok := md["Procedure-ID"]; ok {
|
||||
return v
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (t *Ticket) metadata() map[string]string {
|
||||
md := make(map[string]string)
|
||||
lines := strings.Split(t.Body, "\n")
|
||||
for _, line := range lines {
|
||||
// TODO: transition to RFC822 parsing
|
||||
if strings.Contains(line, ":") {
|
||||
tokens := strings.Split(line, ":")
|
||||
if len(tokens) != 2 {
|
||||
continue
|
||||
}
|
||||
md[strings.TrimSpace(tokens[0])] = strings.TrimSpace(tokens[1])
|
||||
}
|
||||
}
|
||||
return md
|
||||
}
|
||||
|
||||
func (t *Ticket) SetBool(name string) {
|
||||
t.Attributes[name] = true
|
||||
}
|
||||
func (t *Ticket) Bool(name string) bool {
|
||||
bi, ok := t.Attributes[name]
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
b, ok := bi.(bool)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return b
|
||||
}
|
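Ticket bodies carry simple "Key: value" metadata lines, which is how ProcedureID recovers the originating procedure. A small usage sketch with a hypothetical ticket (the ID, name, and procedure identifier are placeholders):

package main

import (
	"fmt"

	"github.com/strongdm/comply/internal/model"
)

func main() {
	t := &model.Ticket{
		ID:   "42",
		Name: "Quarterly access review",
		Body: "Procedure-ID: access-review\n\nSee the procedure document for details.",
	}
	fmt.Println(t.ProcedureID()) // prints "access-review"
}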
4
internal/path/doc.go
Normal file
@ -0,0 +1,4 @@
|
||||
/*
|
||||
Package path provides convenient access to comply project path conventions.
|
||||
*/
|
||||
package path
|
55
internal/path/path.go
Normal file
@ -0,0 +1,55 @@
|
||||
package path
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// File wraps an os.FileInfo as well as the absolute path to the underlying file.
|
||||
type File struct {
|
||||
FullPath string
|
||||
Info os.FileInfo
|
||||
}
|
||||
|
||||
// Standards lists all standard files.
|
||||
func Standards() ([]File, error) {
|
||||
return filesFor("standards", "yml")
|
||||
}
|
||||
|
||||
// Narratives lists all narrative files.
|
||||
func Narratives() ([]File, error) {
|
||||
return filesFor("narratives", "md")
|
||||
}
|
||||
|
||||
// Policies lists all policy files.
|
||||
func Policies() ([]File, error) {
|
||||
return filesFor("policies", "md")
|
||||
}
|
||||
|
||||
// Procedures lists all procedure files.
|
||||
func Procedures() ([]File, error) {
|
||||
return filesFor("procedures", "md")
|
||||
}
|
||||
|
||||
func filesFor(name, extension string) ([]File, error) {
|
||||
var filtered []File
|
||||
files, err := ioutil.ReadDir(filepath.Join(".", name))
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to load files for: "+name)
|
||||
}
|
||||
for _, f := range files {
|
||||
if !strings.HasSuffix(f.Name(), "."+extension) || strings.HasPrefix(strings.ToUpper(f.Name()), "README") {
|
||||
continue
|
||||
}
|
||||
abs, err := filepath.Abs(filepath.Join(".", name, f.Name()))
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to load file: "+f.Name())
|
||||
}
|
||||
filtered = append(filtered, File{abs, f})
|
||||
}
|
||||
return filtered, nil
|
||||
}
|
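Taken together, these helpers imply a project layout along the following lines; the directory names and extensions come from the functions above, while the individual file names are placeholders:

comply.yml
standards/
    soc2.yml
narratives/
    control-environment.md
policies/
    access-policy.md
procedures/
    offboarding.md

README files and files with other extensions in these directories are ignored.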
179
internal/plugin/github/github.go
Normal file
@ -0,0 +1,179 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
"github.com/google/go-github/github"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/strongdm/comply/internal/model"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
const (
|
||||
cfgToken = "token"
|
||||
cfgUsername = "username"
|
||||
cfgRepo = "repo"
|
||||
)
|
||||
|
||||
var prompts = map[string]string{
|
||||
cfgToken: "GitHub Token",
|
||||
cfgUsername: "GitHub Username",
|
||||
cfgRepo: "GitHub Repository",
|
||||
}
|
||||
|
||||
// Prompts are human-readable configuration element names
|
||||
func (g *githubPlugin) Prompts() map[string]string {
|
||||
return prompts
|
||||
}
|
||||
|
||||
// Register causes the Github plugin to register itself
|
||||
func Register() {
|
||||
model.Register(model.Github, &githubPlugin{})
|
||||
}
|
||||
|
||||
type githubPlugin struct {
|
||||
token string
|
||||
username string
|
||||
reponame string
|
||||
|
||||
clientMu sync.Mutex
|
||||
client *github.Client
|
||||
}
|
||||
|
||||
func (g *githubPlugin) api() *github.Client {
|
||||
g.clientMu.Lock()
|
||||
defer g.clientMu.Unlock()
|
||||
|
||||
if g.client == nil {
|
||||
ts := oauth2.StaticTokenSource(
|
||||
&oauth2.Token{AccessToken: g.token},
|
||||
)
|
||||
|
||||
// get go-github client
|
||||
gh := github.NewClient(oauth2.NewClient(context.Background(), ts))
|
||||
g.client = gh
|
||||
}
|
||||
return g.client
|
||||
}
|
||||
|
||||
func (g *githubPlugin) Get(ID string) (*model.Ticket, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (g *githubPlugin) Configure(cfg map[string]interface{}) error {
|
||||
var err error
|
||||
|
||||
if g.token, err = getCfg(cfg, cfgToken); err != nil {
|
||||
return err
|
||||
}
|
||||
if g.username, err = getCfg(cfg, cfgUsername); err != nil {
|
||||
return err
|
||||
}
|
||||
if g.reponame, err = getCfg(cfg, cfgRepo); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getCfg(cfg map[string]interface{}, k string) (string, error) {
|
||||
v, ok := cfg[k]
|
||||
if !ok {
|
||||
return "", errors.New("Missing key: " + k)
|
||||
}
|
||||
|
||||
vS, ok := v.(string)
|
||||
if !ok {
|
||||
return "", errors.New("Malformatted key: " + k)
|
||||
}
|
||||
return vS, nil
|
||||
}
|
||||
|
||||
func (g *githubPlugin) FindOpen() ([]*model.Ticket, error) {
|
||||
issues, _, err := g.api().Issues.ListByRepo(context.Background(), g.username, g.reponame, &github.IssueListByRepoOptions{
|
||||
State: "open",
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error during FindOpen")
|
||||
}
|
||||
|
||||
return toTickets(issues), nil
|
||||
}
|
||||
|
||||
func (g *githubPlugin) FindByTag(name, value string) ([]*model.Ticket, error) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
func (g *githubPlugin) FindByTagName(name string) ([]*model.Ticket, error) {
|
||||
issues, _, err := g.api().Issues.ListByRepo(context.Background(), g.username, g.reponame, &github.IssueListByRepoOptions{
|
||||
State: "all",
|
||||
Labels: []string{name},
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error during FindOpen")
|
||||
}
|
||||
|
||||
return toTickets(issues), nil
|
||||
}
|

func (g *githubPlugin) LinkFor(t *model.Ticket) string {
	return fmt.Sprintf("https://github.com/%s/%s/issues/%s", g.username, g.reponame, t.ID)
}

func (g *githubPlugin) Links() (string, string) {
	return fmt.Sprintf("https://github.com/%s/%s/issues?q=is%%3Aissue+is%%3Aclosed+label%%3Acomply", g.username, g.reponame),
		fmt.Sprintf("https://github.com/%s/%s/issues?q=is%%3Aissue+is%%3Aopen+label%%3Acomply", g.username, g.reponame)
}

func (g *githubPlugin) Create(ticket *model.Ticket, labels []string) error {
	_, _, err := g.api().Issues.Create(context.Background(), g.username, g.reponame, &github.IssueRequest{
		Title:  &ticket.Name,
		Body:   &ticket.Body,
		Labels: &labels,
	})
	return err
}

func toTickets(issues []*github.Issue) []*model.Ticket {
	var tickets []*model.Ticket
	for _, i := range issues {
		tickets = append(tickets, toTicket(i))
	}
	return tickets
}

func toTicket(i *github.Issue) *model.Ticket {
	t := &model.Ticket{Attributes: make(map[string]interface{})}
	t.ID = strconv.Itoa(*i.Number)
	t.Name = ss(i.Title)
	t.Body = ss(i.Body)
	t.CreatedAt = i.CreatedAt
	t.State = toState(ss(i.State))

	for _, l := range i.Labels {
		if l.Name != nil {
			t.SetBool(*l.Name)
		}
	}
	return t
}

func toState(state string) model.TicketState {
	switch state {
	case "closed":
		return model.Closed
	}
	return model.Open
}

func ss(s *string) string {
	if s == nil {
		return ""
	}
	return *s
}
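
A hedged wiring sketch (not part of this commit) for the GitHub plugin; the token, owner, and repository values are placeholders, and it assumes the plugin returned by model.GetPlugin exposes the Configure and FindOpen methods defined above:

package main

import (
	"fmt"
	"log"

	"github.com/strongdm/comply/internal/model"
	"github.com/strongdm/comply/internal/plugin/github"
)

func main() {
	github.Register()
	tp := model.GetPlugin(model.Github)
	err := tp.Configure(map[string]interface{}{
		"token":    "<personal access token>", // placeholder
		"username": "example-org",             // placeholder
		"repo":     "example-repo",            // placeholder
	})
	if err != nil {
		log.Fatal(err)
	}
	open, err := tp.FindOpen()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(open), "open tickets")
}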
142  internal/render/controller.go  Normal file
@ -0,0 +1,142 @@
package render

import (
	"fmt"
	"sort"
	"time"

	"github.com/strongdm/comply/internal/config"
	"github.com/strongdm/comply/internal/model"
)

type project struct {
	OrganizationName string
	Name             string
}

type stats struct {
	ControlsTotal     int
	ControlsSatisfied int

	ProcessTotal      int
	ProcessOpen       int
	ProcessOldestDays int

	AuditOpen   int
	AuditClosed int
	AuditTotal  int
}

type renderData struct {
	// duplicates Project.OrganizationName
	Name       string
	Project    *project
	Stats      *stats
	Narratives []*model.Narrative
	Policies   []*model.Policy
	Procedures []*model.Procedure
	Standards  []*model.Standard
	Tickets    []*model.Ticket
	Controls   []*control
}

type control struct {
	Standard    string
	ControlKey  string
	Name        string
	Description string
	Satisfied   bool
	SatisfiedBy []string
}

func load() (*model.Data, *renderData, error) {
	modelData, err := model.ReadData()
	if err != nil {
		return nil, nil, err
	}

	cfg := config.Config()
	project := &project{
		OrganizationName: cfg.Name,
		Name:             fmt.Sprintf("%s Compliance Program", cfg.Name),
	}

	satisfied := model.ControlsSatisfied(modelData)
	controls := make([]*control, 0)
	for _, standard := range modelData.Standards {
		for key, c := range standard.Controls {
			satisfactions, ok := satisfied[key]
			satisfied := ok && len(satisfactions) > 0
			controls = append(controls, &control{
				Standard:    standard.Name,
				ControlKey:  key,
				Name:        c.Name,
				Description: c.Description,
				Satisfied:   satisfied,
				SatisfiedBy: satisfactions,
			})
		}
	}
	sort.Slice(controls, func(i, j int) bool {
		return controls[i].ControlKey < controls[j].ControlKey
	})

	rd := &renderData{}
	rd.Narratives = modelData.Narratives
	rd.Policies = modelData.Policies
	rd.Procedures = modelData.Procedures
	rd.Standards = modelData.Standards
	rd.Tickets = modelData.Tickets
	rd.Project = project
	rd.Name = project.OrganizationName
	rd.Controls = controls
	return modelData, rd, nil
}

func loadWithStats() (*model.Data, *renderData, error) {
	modelData, renderData, err := load()
	if err != nil {
		return nil, nil, err
	}

	addStats(modelData, renderData)
	return modelData, renderData, nil
}

func addStats(modelData *model.Data, renderData *renderData) {
	stats := &stats{}

	satisfied := model.ControlsSatisfied(modelData)

	for _, std := range renderData.Standards {
		stats.ControlsTotal += len(std.Controls)
		for controlKey := range std.Controls {
			if _, ok := satisfied[controlKey]; ok {
				stats.ControlsSatisfied++
			}
		}
	}

	for _, t := range renderData.Tickets {
		if t.Bool("audit") {
			stats.AuditTotal++
		}

		if t.State == model.Open {
			if t.Bool("process") {
				stats.ProcessOpen++
				if t.CreatedAt != nil {
					age := int(time.Since(*t.CreatedAt).Hours() / float64(24))
					if stats.ProcessOldestDays < age {
						stats.ProcessOldestDays = age
					}
				}
			}
			if t.Bool("audit") {
				stats.AuditOpen++
			}
		}
	}

	renderData.Stats = stats
}
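
A minimal sketch (not part of this commit) of how renderData might be fed to a template; it is framed as a hypothetical helper inside package render so it can reach the unexported loadWithStats, and mirrors what html.go does through ace templates:

// Hypothetical helper inside package render; field names match the structs above.
package render

import (
	"log"
	"os"
	"text/template"
)

func printSummary() {
	_, data, err := loadWithStats()
	if err != nil {
		log.Fatal(err)
	}
	t := template.Must(template.New("summary").Parse(
		"{{.Name}}: {{.Stats.ControlsSatisfied}}/{{.Stats.ControlsTotal}} controls satisfied\n"))
	if err := t.Execute(os.Stdout, data); err != nil {
		log.Fatal(err)
	}
}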
4  internal/render/doc.go  Normal file
@ -0,0 +1,4 @@
/*
Package render defines markdown preprocessors, HTML and PDF generation.
*/
package render
80  internal/render/html.go  Normal file
@ -0,0 +1,80 @@
package render

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"sync"

	"github.com/pkg/errors"
	"github.com/yosssi/ace"
)

const websocketReloader = `<script>
(function(){
	var ws = new WebSocket("ws://localhost:5122/ws")
	var connected = false
	ws.onopen = function(e) {
		connected = true
	}
	ws.onclose = function(e) {
		// reload!
		if (connected) {
			window.location=window.location
		}
	}
})()
</script>`

func html(output string, live bool, errCh chan error, wg *sync.WaitGroup) {
	for {
		files, err := ioutil.ReadDir(filepath.Join(".", "templates"))
		if err != nil {
			errCh <- errors.Wrap(err, "unable to open template directory")
			return
		}

		_, data, err := loadWithStats()
		if err != nil {
			errCh <- errors.Wrap(err, "unable to load data")
			return
		}

		for _, fileInfo := range files {
			if !strings.HasSuffix(fileInfo.Name(), ".ace") {
				continue
			}

			basename := strings.Replace(fileInfo.Name(), ".ace", "", -1)
			w, err := os.Create(filepath.Join(output, fmt.Sprintf("%s.html", basename)))
			if err != nil {
				errCh <- errors.Wrap(err, "unable to create HTML file")
				return
			}

			tpl, err := ace.Load("", filepath.Join("templates", basename), aceOpts)
			if err != nil {
				w.Write([]byte("<html><body>template error</body></html>"))
				fmt.Println(err)
			}

			err = tpl.Execute(w, data)
			if err != nil {
				w.Write([]byte("<html><body>template error</body></html>"))
				fmt.Println(err)
			}

			if live {
				w.Write([]byte(websocketReloader))
			}
			w.Close()
		}
		if !live {
			wg.Done()
			return
		}
		<-subscribe()
	}
}
185  internal/render/narrative.go  Normal file
@ -0,0 +1,185 @@
package render

import (
	"bytes"
	"context"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"text/template"
	"time"

	"github.com/docker/docker/api/types"
	"github.com/docker/docker/api/types/container"
	"github.com/docker/docker/client"
	"github.com/pkg/errors"
	"github.com/strongdm/comply/internal/config"
	"github.com/strongdm/comply/internal/model"
)

// TODO: refactor and eliminate duplication among narrative, policy renderers
func renderNarrativeToDisk(wg *sync.WaitGroup, errOutputCh chan error, data *renderData, narrative *model.Narrative, live bool) {
	// only files that have been touched
	if !isNewer(narrative.FullPath, narrative.ModifiedAt) {
		return
	}
	recordModified(narrative.FullPath, narrative.ModifiedAt)

	ctx := context.Background()
	cli, err := client.NewEnvClient()
	if err != nil {
		errOutputCh <- errors.Wrap(err, "unable to read Docker environment")
		return
	}

	pwd, err := os.Getwd()
	if err != nil {
		errOutputCh <- errors.Wrap(err, "unable to get working directory")
		return
	}

	hc := &container.HostConfig{
		Binds: []string{pwd + ":/source"},
	}

	wg.Add(1)
	go func(p *model.Narrative) {
		defer wg.Done()

		if live {
			rel, err := filepath.Rel(config.ProjectRoot(), p.FullPath)
			if err != nil {
				rel = p.FullPath
			}
			fmt.Printf("%s -> %s\n", rel, filepath.Join("output", p.OutputFilename))
		}

		outputFilename := p.OutputFilename
		// save preprocessed markdown
		err = preprocessNarrative(data, p, filepath.Join(".", "output", outputFilename+".md"))
		if err != nil {
			errOutputCh <- errors.Wrap(err, "unable to preprocess")
			return
		}

		cmd := []string{"--smart", "--toc", "-N", "--template=/source/templates/default.latex", "-o",
			fmt.Sprintf("/source/output/%s", outputFilename),
			fmt.Sprintf("/source/output/%s.md", outputFilename)}

		resp, err := cli.ContainerCreate(ctx, &container.Config{
			Image: "strongdm/pandoc",
			Cmd:   cmd},
			hc, nil, "")

		if err != nil {
			errOutputCh <- errors.Wrap(err, "unable to create Docker container")
			return
		}

		defer func() {
			timeout := 2 * time.Second
			cli.ContainerStop(ctx, resp.ID, &timeout)
			err := cli.ContainerRemove(ctx, resp.ID, types.ContainerRemoveOptions{Force: true})
			if err != nil {
				errOutputCh <- errors.Wrap(err, "unable to remove container")
				return
			}
		}()

		if err := cli.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{}); err != nil {
			errOutputCh <- errors.Wrap(err, "unable to start Docker container")
			return
		}

		_, err = cli.ContainerWait(ctx, resp.ID)
		if err != nil {
			errOutputCh <- errors.Wrap(err, "error awaiting Docker container")
			return
		}

		_, err = cli.ContainerLogs(ctx, resp.ID, types.ContainerLogsOptions{ShowStdout: true})
		if err != nil {
			errOutputCh <- errors.Wrap(err, "error reading Docker container logs")
			return
		}

		// remove preprocessed markdown
		err = os.Remove(filepath.Join(".", "output", outputFilename+".md"))
		if err != nil {
			errOutputCh <- err
			return
		}
	}(narrative)
}

func preprocessNarrative(data *renderData, pol *model.Narrative, fullPath string) error {
	cfg := config.Config()

	var w bytes.Buffer
	bodyTemplate, err := template.New("body").Parse(pol.Body)
	if err != nil {
		w.WriteString(fmt.Sprintf("# Error processing template:\n\n%s\n", err.Error()))
	} else {
		bodyTemplate.Execute(&w, data)
	}
	body := w.String()

	revisionTable := ""
	satisfiesTable := ""

	// ||Date|Comment|
	// |---+------|
	// | 4 Jan 2018 | Initial Version |
	// Table: Document history

	if len(pol.Satisfies) > 0 {
		rows := ""
		for standard, keys := range pol.Satisfies {
			rows += fmt.Sprintf("| %s | %s |\n", standard, strings.Join(keys, ", "))
		}
		satisfiesTable = fmt.Sprintf("|Standard|Controls Satisfied|\n|-------+--------------------------------------------|\n%s\nTable: Control satisfaction\n", rows)
	}

	if len(pol.Revisions) > 0 {
		rows := ""
		for _, rev := range pol.Revisions {
			rows += fmt.Sprintf("| %s | %s |\n", rev.Date, rev.Comment)
		}
		revisionTable = fmt.Sprintf("|Date|Comment|\n|---+--------------------------------------------|\n%s\nTable: Document history\n", rows)
	}

	doc := fmt.Sprintf(`%% %s
%% %s
%% %s

---
header-includes: yes
head-content: "%s"
foot-content: "%s confidential %d"
---

%s

%s

\newpage
%s`,
		pol.Name,
		cfg.Name,
		fmt.Sprintf("%s %d", pol.ModifiedAt.Month().String(), pol.ModifiedAt.Year()),
		pol.Name,
		cfg.Name,
		time.Now().Year(),
		satisfiesTable,
		revisionTable,
		body,
	)
	err = ioutil.WriteFile(fullPath, []byte(doc), os.FileMode(0644))
	if err != nil {
		return errors.Wrap(err, "unable to write preprocessed narrative to disk")
	}
	return nil
}
49  internal/render/pdf.go  Normal file
@ -0,0 +1,49 @@
package render

import (
	"sync"

	"github.com/pkg/errors"
	"github.com/strongdm/comply/internal/model"
)

func pdf(output string, live bool, errCh chan error, wg *sync.WaitGroup) {
	var pdfWG sync.WaitGroup

	errOutputCh := make(chan error)

	for {
		_, data, err := loadWithStats()
		if err != nil {
			errCh <- errors.Wrap(err, "unable to load data")
			return
		}

		policies, err := model.ReadPolicies()
		if err != nil {
			errCh <- errors.Wrap(err, "unable to read policies")
			return
		}
		for _, policy := range policies {
			renderPolicyToDisk(&pdfWG, errOutputCh, data, policy, live)
		}

		narratives, err := model.ReadNarratives()
		if err != nil {
			errCh <- errors.Wrap(err, "unable to read narratives")
			return
		}

		for _, narrative := range narratives {
			renderNarrativeToDisk(&pdfWG, errOutputCh, data, narrative, live)
		}

		pdfWG.Wait()

		if !live {
			wg.Done()
			return
		}
		<-subscribe()
	}
}
183  internal/render/policy.go  Normal file
@ -0,0 +1,183 @@
package render

import (
	"bytes"
	"context"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"text/template"
	"time"

	"github.com/docker/docker/api/types"
	"github.com/docker/docker/api/types/container"
	"github.com/docker/docker/client"
	"github.com/pkg/errors"
	"github.com/strongdm/comply/internal/config"
	"github.com/strongdm/comply/internal/model"
)

// TODO: refactor and eliminate duplication among narrative, policy renderers
func renderPolicyToDisk(wg *sync.WaitGroup, errOutputCh chan error, data *renderData, policy *model.Policy, live bool) {
	// only files that have been touched
	if !isNewer(policy.FullPath, policy.ModifiedAt) {
		return
	}
	recordModified(policy.FullPath, policy.ModifiedAt)

	ctx := context.Background()
	cli, err := client.NewEnvClient()
	if err != nil {
		errOutputCh <- errors.Wrap(err, "unable to read Docker environment")
		return
	}

	pwd, err := os.Getwd()
	if err != nil {
		errOutputCh <- errors.Wrap(err, "unable to get working directory")
		return
	}

	hc := &container.HostConfig{
		Binds: []string{pwd + ":/source"},
	}

	wg.Add(1)
	go func(p *model.Policy) {
		defer wg.Done()

		if live {
			rel, err := filepath.Rel(config.ProjectRoot(), p.FullPath)
			if err != nil {
				rel = p.FullPath
			}
			fmt.Printf("%s -> %s\n", rel, filepath.Join("output", p.OutputFilename))
		}

		outputFilename := p.OutputFilename
		// save preprocessed markdown
		err = preprocessPolicy(data, p, filepath.Join(".", "output", outputFilename+".md"))
		if err != nil {
			errOutputCh <- errors.Wrap(err, "unable to preprocess")
			return
		}

		resp, err := cli.ContainerCreate(ctx, &container.Config{
			Image: "strongdm/pandoc",
			Cmd: []string{"--smart", "--toc", "-N", "--template=/source/templates/default.latex", "-o",
				fmt.Sprintf("/source/output/%s", outputFilename),
				fmt.Sprintf("/source/output/%s.md", outputFilename),
			},
		}, hc, nil, "")
		if err != nil {
			errOutputCh <- errors.Wrap(err, "unable to create Docker container")
			return
		}

		defer func() {
			timeout := 2 * time.Second
			cli.ContainerStop(ctx, resp.ID, &timeout)
			err := cli.ContainerRemove(ctx, resp.ID, types.ContainerRemoveOptions{Force: true})
			if err != nil {
				errOutputCh <- errors.Wrap(err, "unable to remove container")
				return
			}
		}()

		if err := cli.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{}); err != nil {
			errOutputCh <- errors.Wrap(err, "unable to start Docker container")
			return
		}

		_, err = cli.ContainerWait(ctx, resp.ID)
		if err != nil {
			errOutputCh <- errors.Wrap(err, "error awaiting Docker container")
			return
		}

		_, err = cli.ContainerLogs(ctx, resp.ID, types.ContainerLogsOptions{ShowStdout: true})
		if err != nil {
			errOutputCh <- errors.Wrap(err, "error reading Docker container logs")
			return
		}

		// remove preprocessed markdown
		err = os.Remove(filepath.Join(".", "output", outputFilename+".md"))
		if err != nil {
			errOutputCh <- err
			return
		}
	}(policy)
}

func preprocessPolicy(data *renderData, pol *model.Policy, fullPath string) error {
	cfg := config.Config()

	var w bytes.Buffer
	bodyTemplate, err := template.New("body").Parse(pol.Body)
	if err != nil {
		w.WriteString(fmt.Sprintf("# Error processing template:\n\n%s\n", err.Error()))
	} else {
		bodyTemplate.Execute(&w, data)
	}
	body := w.String()

	revisionTable := ""
	satisfiesTable := ""

	// ||Date|Comment|
	// |---+------|
	// | 4 Jan 2018 | Initial Version |
	// Table: Document history

	if len(pol.Satisfies) > 0 {
		rows := ""
		for standard, keys := range pol.Satisfies {
			rows += fmt.Sprintf("| %s | %s |\n", standard, strings.Join(keys, ", "))
		}
		satisfiesTable = fmt.Sprintf("|Standard|Controls Satisfied|\n|-------+--------------------------------------------|\n%s\nTable: Control satisfaction\n", rows)
	}

	if len(pol.Revisions) > 0 {
		rows := ""
		for _, rev := range pol.Revisions {
			rows += fmt.Sprintf("| %s | %s |\n", rev.Date, rev.Comment)
		}
		revisionTable = fmt.Sprintf("|Date|Comment|\n|---+--------------------------------------------|\n%s\nTable: Document history\n", rows)
	}

	doc := fmt.Sprintf(`%% %s
%% %s
%% %s

---
header-includes: yes
head-content: "%s"
foot-content: "%s confidential %d"
---

%s

%s

\newpage
%s`,
		pol.Name,
		cfg.Name,
		fmt.Sprintf("%s %d", pol.ModifiedAt.Month().String(), pol.ModifiedAt.Year()),
		pol.Name,
		cfg.Name,
		time.Now().Year(),
		satisfiesTable,
		revisionTable,
		body,
	)
	err = ioutil.WriteFile(fullPath, []byte(doc), os.FileMode(0644))
	if err != nil {
		return errors.Wrap(err, "unable to write preprocessed policy to disk")
	}
	return nil
}
118  internal/render/site.go  Normal file
@ -0,0 +1,118 @@
package render

import (
	"net/http"
	"os"
	"sync"
	"time"

	"github.com/gorilla/websocket"
	"github.com/pkg/errors"
	"github.com/skratchdot/open-golang/open"
	"github.com/yosssi/ace"
)

var upgrader = websocket.Upgrader{
	ReadBufferSize:  1024,
	WriteBufferSize: 1024,
	CheckOrigin: func(r *http.Request) bool {
		return true
	},
}

var aceOpts = &ace.Options{
	DynamicReload: true,
	Indent:        " ",
}

var watchChMu sync.Mutex
var watchCh chan struct{}

func subscribe() chan struct{} {
	watchChMu.Lock()
	defer watchChMu.Unlock()
	if watchCh == nil {
		watchCh = make(chan struct{})
	}
	return watchCh
}

func broadcast() {
	watchChMu.Lock()
	defer watchChMu.Unlock()
	close(watchCh)
	watchCh = nil
}

var lastModifiedMu sync.Mutex
var lastModified = make(map[string]time.Time)

func recordModified(path string, t time.Time) {
	lastModifiedMu.Lock()
	defer lastModifiedMu.Unlock()

	previous, ok := lastModified[path]
	if !ok || t.After(previous) {
		lastModified[path] = t
	}
}

func isNewer(path string, t time.Time) bool {
	lastModifiedMu.Lock()
	defer lastModifiedMu.Unlock()

	previous, ok := lastModified[path]
	if !ok {
		return true
	}

	// is t after previous? Then isNewer is true.
	return t.After(previous)
}

// Build generates all PDF and HTML output to the target directory with optional live reload.
func Build(output string, live bool) error {
	err := os.RemoveAll(output)
	if err != nil {
		return errors.Wrap(err, "unable to remove files from output directory")
	}

	err = os.MkdirAll(output, os.FileMode(0755))
	if err != nil {
		return errors.Wrap(err, "unable to create output directory")
	}

	var wg sync.WaitGroup
	errCh := make(chan error, 0)
	wgCh := make(chan struct{})

	if live {
		watch(errCh)
	}
	// PDF
	wg.Add(1)
	go pdf(output, live, errCh, &wg)

	// HTML
	wg.Add(1)
	go html(output, live, errCh, &wg)

	// WG monitor
	go func() {
		wg.Wait()
		close(wgCh)
	}()

	if live {
		open.Run("output/index.html")
	}

	select {
	case <-wgCh:
		// success
	case err := <-errCh:
		return errors.Wrap(err, "error during build")
	}

	return nil
}
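
A short caller sketch (not part of this commit) for the exported Build entry point above; the output directory name mirrors the default used elsewhere in this commit:

package main

import (
	"log"

	"github.com/strongdm/comply/internal/render"
)

func main() {
	// false disables the websocket live-reload path.
	if err := render.Build("output", false); err != nil {
		log.Fatal(err)
	}
}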
50  internal/render/watch.go  Normal file
@ -0,0 +1,50 @@
package render

import (
	"net/http"
	"time"

	"github.com/gohugoio/hugo/watcher"
)

func watch(errCh chan error) {
	b, err := watcher.New(300 * time.Millisecond)
	if err != nil {
		errCh <- err
		return
	}
	b.Add("./templates/")
	b.Add("./policies/")
	b.Add("./procedures/")

	b.Add("./.comply/")
	b.Add("./.comply/cache")
	b.Add("./.comply/cache/tickets")

	go func() {
		for {
			select {
			case e := <-b.Errors:
				errCh <- e
			case <-b.Events:
				broadcast()
			}
		}
	}()

	serveWs := func(w http.ResponseWriter, r *http.Request) {
		ws, err := upgrader.Upgrade(w, r, nil)
		if err != nil {
			errCh <- err
			return
		}
		<-subscribe()
		time.Sleep(500 * time.Millisecond)
		ws.Close()
	}

	http.HandleFunc("/ws", serveWs)
	go http.ListenAndServe("127.0.0.1:5122", nil)

	return
}
1  internal/theme/.gitignore  vendored  Normal file
@ -0,0 +1 @@
themes_bindata.go
28  internal/theme/theme.go  Normal file
@ -0,0 +1,28 @@
package theme

import (
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
)

// SaveTo extracts the named theme's bundled assets into saveDir, recreating
// the theme's directory structure.
func SaveTo(themeName, saveDir string) error {
	for _, name := range AssetNames() {
		prefix := themeName + "/"
		if strings.HasPrefix(name, prefix) {
			outputName := strings.TrimPrefix(name, prefix)
			assetDir, assetFilename := filepath.Split(outputName)
			err := os.MkdirAll(filepath.Join(saveDir, assetDir), os.FileMode(0755))
			if err != nil {
				return err
			}
			err = ioutil.WriteFile(filepath.Join(saveDir, assetDir, assetFilename), MustAsset(name), os.FileMode(0644))
			if err != nil {
				return err
			}
		}
	}
	// TODO
	return nil
}
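
A hypothetical call (not part of this commit) extracting the bundled "comply-blank" theme, the only theme directory present in this diff, into the current directory; note the theme package depends on the generated themes_bindata.go, which is gitignored above:

package main

import (
	"log"

	"github.com/strongdm/comply/internal/theme"
)

func main() {
	if err := theme.SaveTo("comply-blank", "."); err != nil {
		log.Fatal(err)
	}
}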
107  internal/ticket/scheduler.go  Normal file
@ -0,0 +1,107 @@
package ticket

import (
	"fmt"
	"sort"
	"time"

	"github.com/robfig/cron"
	"github.com/strongdm/comply/internal/model"
)

func byProcessByTime(tickets []*model.Ticket) map[string][]*model.Ticket {
	result := make(map[string][]*model.Ticket)
	for _, t := range tickets {
		processID := t.ProcedureID()
		if processID == "" {
			// missing process metadata; skip
			continue
		}
		list, ok := result[processID]
		if !ok {
			list = make([]*model.Ticket, 0)
		}
		list = append(list, t)
		sort.Slice(list, func(i, j int) bool {
			if list[i].CreatedAt == nil || list[j].CreatedAt == nil {
				return false
			}
			return list[i].CreatedAt.Before(*list[j].CreatedAt)
		})
		result[processID] = list
	}
	return result
}

// TriggerScheduled creates a ticket for any procedure whose cron schedule has
// come due since its most recent ticket.
func TriggerScheduled() error {
	rawTickets, err := model.ReadTickets()
	if err != nil {
		return err
	}
	tickets := byProcessByTime(rawTickets)
	procedures, err := model.ReadProcedures()
	if err != nil {
		return err
	}

	for _, procedure := range procedures {
		if procedure.Cron == "" {
			continue
		}

		procedureID := procedure.ID
		schedule, err := cron.Parse(procedure.Cron)
		if err != nil {
			continue
		}
		ticketsForProc, ok := tickets[procedureID]
		if ok {
			// find most recent one
			mostRecent := ticketsForProc[len(ticketsForProc)-1]
			if mostRecent.CreatedAt == nil {
				continue
			}

			// would another have triggered since?
			nextTrigger := schedule.Next(*mostRecent.CreatedAt).UTC()
			if nextTrigger.After(time.Now().UTC()) {
				// in the future, nothing to do
				continue
			}
			trigger(procedure)
		} else {
			// don't go back further than 13 months
			tooOld := time.Now().Add(-1 * time.Hour * 24 * (365 + 30))
			// search back one day until triggers
			triggeredAt := time.Now().Add(-24 * time.Hour).UTC()
		SEARCH:
			for {
				if triggeredAt.Before(tooOld) {
					break SEARCH
				}

				candidate := schedule.Next(triggeredAt)
				// in the future? not far enough back yet.
				if candidate.After(time.Now().UTC()) {
					triggeredAt = triggeredAt.Add(-24 * time.Hour)
					continue
				}

				// is in the past? then trigger.
				trigger(procedure)
				break SEARCH
			}
		}

	}
	return nil
}

func trigger(procedure *model.Procedure) {
	// TODO: don't hardcode GH
	tp := model.GetPlugin(model.Github)
	tp.Create(&model.Ticket{
		Name: procedure.Name,
		Body: fmt.Sprintf("%s\n\n\n---\nProcedure-ID: %s", procedure.Body, procedure.ID),
	}, []string{"comply", "comply-procedure"})
}
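
A self-contained sketch (not part of this commit) of the same cron check TriggerScheduled performs above: has another run come due since the last ticket? The "@weekly" spec and the ten-day-old timestamp are made-up inputs for illustration:

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron"
)

func main() {
	schedule, err := cron.Parse("@weekly") // assumed schedule string
	if err != nil {
		panic(err)
	}
	// pretend the most recent ticket for this procedure is 10 days old
	lastTicket := time.Now().Add(-10 * 24 * time.Hour)
	next := schedule.Next(lastTicket).UTC()
	if !next.After(time.Now().UTC()) {
		fmt.Println("a new ticket would be triggered")
	}
}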
0  themes/comply-blank/narratives/.gitkeep  Normal file
0  themes/comply-blank/policies/.gitkeep  Normal file
0  themes/comply-blank/procedures/.gitkeep  Normal file
0  themes/comply-blank/standards/.gitkeep  Normal file
Some files were not shown because too many files have changed in this diff.