
Commit 5ee17f3

Deploy to ZIP download
1 parent f9f405f commit 5ee17f3

99 files changed: +6,962 -2,254 lines


grafana/dashboards/dashboard.json

+1,152 (large diff not rendered)

grafana/provisioning/dashboard.yml

+17
@@ -0,0 +1,17 @@
+
+apiVersion: 1
+
+providers:
+
+- name: 'trustgraph.ai'
+  orgId: 1
+  folder: 'TrustGraph'
+  folderUid: 'b6c5be90-d432-4df8-aeab-737c7b151228'
+  type: file
+  disableDeletion: false
+  updateIntervalSeconds: 30
+  allowUiUpdates: true
+  options:
+    path: /var/lib/grafana/dashboards
+    foldersFromFilesStructure: false
+

grafana/provisioning/datasource.yml

+21
@@ -0,0 +1,21 @@
+apiVersion: 1
+
+prune: true
+
+datasources:
+- name: Prometheus
+  type: prometheus
+  access: proxy
+  orgId: 1
+  # <string> Sets a custom UID to reference this
+  # data source in other parts of the configuration.
+  # If not specified, Grafana generates one.
+  uid: 'f6b18033-5918-4e05-a1ca-4cb30343b129'
+
+  url: http://prometheus:9090
+
+  basicAuth: false
+  withCredentials: false
+  isDefault: true
+  editable: true
+

package-lock.json

+1,067 -555 (generated file; diff not rendered)

package.json

+4 -1

@@ -11,10 +11,13 @@
   },
   "dependencies": {
     "@emotion/styled": "^11.13.0",
+    "@mui/icons-material": "^6.1.2",
     "@mui/material": "^6.0.1",
     "@mui/styled-engine": "^6.0.1",
+    "jszip": "^3.10.1",
     "react": "^18.3.1",
-    "react-dom": "^18.3.1"
+    "react-dom": "^18.3.1",
+    "zustand": "^5.0.0-rc.2"
   },
   "devDependencies": {
     "@eslint/js": "^9.9.0",

prometheus/prometheus.yml

+147
@@ -0,0 +1,147 @@
+global:
+
+  scrape_interval: 15s # By default, scrape targets every 15 seconds.
+
+  # Attach these labels to any time series or alerts when communicating with
+  # external systems (federation, remote storage, Alertmanager).
+  external_labels:
+    monitor: 'trustgraph'
+
+# A scrape configuration containing exactly one endpoint to scrape:
+# Here it's Prometheus itself.
+scrape_configs:
+
+  # The job name is added as a label `job=<job_name>` to any timeseries
+  # scraped from this config.
+
+  - job_name: 'pdf-decoder'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'pdf-decoder:8000'
+
+  - job_name: 'chunker'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'chunker:8000'
+
+
+  - job_name: 'vectorize'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'vectorize:8000'
+
+
+  - job_name: 'embeddings'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'embeddings:8000'
+
+
+  - job_name: 'kg-extract-definitions'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'kg-extract-definitions:8000'
+
+
+  - job_name: 'kg-extract-topics'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'kg-extract-topics:8000'
+
+
+  - job_name: 'kg-extract-relationships'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'kg-extract-relationships:8000'
+
+
+  - job_name: 'metering'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'metering:8000'
+
+
+  - job_name: 'metering-rag'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'metering-rag:8000'
+
+
+  - job_name: 'store-graph-embeddings'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'store-graph-embeddings:8000'
+
+
+  - job_name: 'store-triples'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'store-triples:8000'
+
+
+  - job_name: 'text-completion'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'text-completion:8000'
+
+
+  - job_name: 'text-completion-rag'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'text-completion-rag:8000'
+
+
+  - job_name: 'graph-rag'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'graph-rag:8000'
+
+
+  - job_name: 'prompt'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'prompt:8000'
+
+
+  - job_name: 'prompt-rag'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'prompt-rag:8000'
+
+
+  - job_name: 'query-graph-embeddings'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'query-graph-embeddings:8000'
+
+
+  - job_name: 'query-triples'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'query-triples:8000'
+
+
+  - job_name: 'pulsar'
+    scrape_interval: 5s
+    static_configs:
+      - targets:
+        - 'pulsar:8080'
+
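Every job above scrapes port 8000 on the corresponding service container (Pulsar is the exception, exposing its broker metrics on 8080). As a rough illustration only, and assuming the services are instrumented with the prometheus_client library (nothing in this commit shows the actual instrumentation), an exporter matching these targets could look like this:

    # Hypothetical sketch of a service-side metrics endpoint matching the
    # scrape targets above; the real TrustGraph instrumentation is not part
    # of this commit.
    from prometheus_client import Counter, start_http_server
    import time

    CHUNKS = Counter("chunks_processed_total", "Chunks processed by this service")

    if __name__ == "__main__":
        start_http_server(8000)   # serves /metrics on :8000, the port Prometheus scrapes
        while True:
            CHUNKS.inc()          # stand-in for real work
            time.sleep(5)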

service

+90 -17

@@ -6,15 +6,19 @@ import logging
 import os
 import json
 import yaml
+import zipfile
+from io import BytesIO

 logger = logging.getLogger("service")
 logger.setLevel(logging.INFO)

 class Generator:

-    def __init__(self, config, base="./templates/"):
+    def __init__(self, config, base="./templates/", version="0.0.0"):
+
         self.jsonnet_base = base
         self.config = config
+        self.version = f"\"{version}\"".encode("utf-8")

     def process(self, config):

@@ -23,25 +27,46 @@ class Generator:

     def load(self, dir, filename):

-        logger.info("Request jsonnet: %s %s", dir, filename)
+        logger.debug("Request jsonnet: %s %s", dir, filename)

         if filename == "config.json" and dir == "":
-            print("config.json...")
             path = os.path.join(".", dir, filename)
             return str(path), self.config

+        if filename == "version.jsonnet" and dir == "./templates/values/":
+            path = os.path.join(".", dir, filename)
+            return str(path), self.version
+
+        if dir:
+            candidates = [
+                os.path.join(".", dir, filename),
+                os.path.join(".", filename)
+            ]
+        else:
+            candidates = [
+                os.path.join(".", filename)
+            ]
+
         try:
-            if dir:
-                path = os.path.join(".", dir, filename)
-            else:
-                path = os.path.join(self.jsonnet_base, filename)

-            logger.debug("Try: %s", path)
-            with open(path, "rb") as f:
-                logger.debug("Loaded: %s", path)
-                return str(path), f.read()
+            if filename == "vertexai/private.json":
+
+                return candidates[0], private_json.encode("utf-8")

+            for c in candidates:
+                logger.debug("Try: %s", c)
+
+                if os.path.isfile(c):
+                    with open(c, "rb") as f:
+                        logger.debug("Loading: %s", c)
+                        return str(c), f.read()
+
+            raise RuntimeError(
+                f"Could not load file={filename} dir={dir}"
+            )
+
         except:
+
             path = os.path.join(self.jsonnet_base, filename)
             logger.debug("Try: %s", path)
             with open(path, "rb") as f:
@@ -98,23 +123,71 @@ class Api:

         return web.HTTPNotFound()

+    def process(
+        self, config, version="0.11.19", platform="docker-compose",
+    ):
+
+        config = config.encode("utf-8")
+
+        gen = Generator(config, version=version)
+
+
+        with open(f"./templates/config-to-{platform}.jsonnet", "r") as f:
+            wrapper = f.read()
+
+        processed = gen.process(wrapper)
+
+        return processed
+
     async def generate(self, request):

         print("Generate...")
+
         config = await request.text()

         print(config)

-        config = config.encode("utf-8")
+        processed = self.process(config)
+        y = yaml.dump(processed)

-        gen = Generator(config)
+        mem = BytesIO()

-        with open("./templates/config-loader.jsonnet", "r") as f:
-            wrapper = f.read()
-        processed = gen.process(wrapper)
+        with zipfile.ZipFile(mem, mode='w') as out:
+
+            def output(name, content):
+                logger.info(f"Adding {name}...")
+                out.writestr(name, content)
+
+            fname = "docker-compose.yaml"
+
+            output(fname, y)
+
+            # Grafana config
+            with open("grafana/dashboards/dashboard.json") as f:
+                output(
+                    "grafana/dashboards/dashboard.json",
+                    f.read()
+                )
+
+            with open("grafana/provisioning/dashboard.yml") as f:
+                output(
+                    "grafana/provisioning/dashboard.yml",
+                    f.read()
+                )
+
+            with open("grafana/provisioning/datasource.yml") as f:
+                output(
+                    "grafana/provisioning/datasource.yml",
+                    f.read()
+                )
+
+            # Prometheus config
+            with open("prometheus/prometheus.yml") as f:
+                output("prometheus/prometheus.yml", f.read())

         return web.Response(
-            text=yaml.dump(processed), content_type = "text/plain"
+            body=mem.getvalue(),
+            content_type = "application/octet-stream"
         )

     def run(self):
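With this change the generate handler returns a ZIP archive (application/octet-stream) instead of plain YAML, so callers now need to unpack the response. A minimal client sketch, assuming the handler is reachable at http://localhost:8080/api/generate (the route and port are not part of this diff):

    # Hypothetical client: POST a TrustGraph config and unpack the returned ZIP.
    # The URL and path are assumptions; only the ZIP response format comes from
    # this commit.
    import json
    import urllib.request
    import zipfile
    from io import BytesIO

    config = json.dumps({})  # stand-in for a real configuration document

    req = urllib.request.Request(
        "http://localhost:8080/api/generate",   # assumed endpoint
        data=config.encode("utf-8"),
        method="POST",
    )

    with urllib.request.urlopen(req) as resp:
        archive = zipfile.ZipFile(BytesIO(resp.read()))

    # The archive holds docker-compose.yaml plus the Grafana and Prometheus
    # configuration files added in this commit.
    archive.extractall("deploy")
    print(archive.namelist())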
