mentra.app.json
{
  "$schema": "https://console.mentra.glass/schemas/app-manifest-v1.json",
  "package": "com.neuroverse.lenses",
  "name": "Lenses",
  "tagline": "Pick who you want in your corner.",
  "description": "Same AI. Different perspective. Choose a lens — Stoic, Coach, Hype Man, Samurai, and more — and your AI responds through that worldview. Tap to activate, talk, get perspective. You bring your own API key. We never see your data.",
  "version": "1.0.0",
  "author": {
    "name": "NeuroverseOS",
    "url": "https://neuroverseos.com",
    "email": "apps@neuroverseos.com"
  },
  "category": "productivity",
  "tags": ["ai", "worldview", "lens", "perspective", "companion", "stoic", "life-coach", "nfl-coach", "mindfulness"],
  "icon": "assets/icon-lenses.png",
  "screenshots": [
    "assets/screenshot-lens-picker.png",
    "assets/screenshot-stoic-response.png",
    "assets/screenshot-hype-man-response.png"
  ],
  "platform": {
    "min_sdk_version": "1.0.0",
    "supported_glasses": [
      "even_realities_g1",
      "mentra_live",
      "mentra_mach1",
      "vuzix_z100"
    ]
  },
  "permissions": {
    "microphone": {
      "required": true,
      "reason": "Listens to your speech when you tap to activate, so the AI can respond through your chosen lens. When ambient context is enabled, passively buffers nearby speech (in local RAM only) to give AI conversational context."
    },
    "display": {
      "required": true,
      "reason": "Shows AI responses on your glasses display."
    },
    "camera": {
      "required": false,
      "reason": "Optional: lets the AI see what you see for visual context (only when you activate it)."
    },
    "location": {
      "required": false,
      "reason": "Optional: provides location context for more relevant responses."
    }
  },
  "ai_declaration": {
    "uses_ai": true,
    "providers": ["openai", "anthropic"],
    "key_model": "user_provided",
    "data_sent_to_ai": ["transcription", "ambient_transcription_optional", "image_optional", "location_optional"],
    "data_retention": "none",
    "ai_can_act": "opt_in_only",
    "ai_can_act_description": "Proactive perspective mode allows AI to surface insights without user activation. Requires explicit opt-in via Settings > Proactive Frequency (default: off). All ambient invariants still apply. Governed by proactive_opt_in invariant.",
    "ai_can_purchase": false,
    "ai_can_message": false
  },
  "governance": {
    "engine": "@neuroverseos/governance",
    "platform_world": "mentraos-smartglasses.nv-world.md",
    "user_rules": true,
    "app_world": "lenses-app.nv-world.md",
    "user_app_governance": true
  },
  "settings_schema": {
    "ai_provider": {
      "type": "select",
      "label": "AI Provider",
      "options": [
        { "value": "anthropic", "label": "Anthropic (Claude)" },
        { "value": "openai", "label": "OpenAI (GPT)" }
      ],
      "default": "anthropic"
    },
    "ai_api_key": {
      "type": "secret",
      "label": "API Key",
      "placeholder": "sk-...",
      "help": "Your key, your cost, your data. We never store or see it."
    },
    "active_lens": {
      "type": "lens_picker",
      "label": "Active Lens",
      "default": "stoic"
    },
    "activation_mode": {
      "type": "select",
      "label": "How to Activate",
      "options": [
        { "value": "tap_hold", "label": "Tap & Hold (temple)" },
        { "value": "double_tap", "label": "Double Tap" },
        { "value": "wake_word", "label": "Say \"Hey Lens\"" },
        { "value": "always_on", "label": "Always Listening" }
      ],
      "default": "tap_hold"
    },
    "ai_model": {
      "type": "select",
      "label": "AI Model",
      "options": [
        { "value": "auto", "label": "Auto (recommended)" },
        { "value": "claude-sonnet-4-20250514", "label": "Claude Sonnet" },
        { "value": "claude-haiku-4-5-20251001", "label": "Claude Haiku (faster)" },
        { "value": "gpt-4o-mini", "label": "GPT-4o Mini (cheaper)" },
        { "value": "gpt-4o", "label": "GPT-4o" }
      ],
      "default": "auto"
    },
    "max_response_words": {
      "type": "number",
      "label": "Max Response Length (words)",
      "default": 50,
      "min": 5,
      "max": 100
    },
    "ambient_context": {
      "type": "toggle",
      "label": "Ambient Context",
      "default": false,
      "help": "When on, the app passively buffers nearby speech in local RAM. When you activate, AI knows what was just said around you. Nothing is stored or sent until you tap.",
      "requires_acknowledgment": "ambient_bystander_disclosure"
    },
    "ambient_buffer_duration": {
      "type": "select",
      "label": "Ambient Buffer Duration",
      "options": [
        { "value": 30, "label": "30 seconds" },
        { "value": 60, "label": "1 minute" },
        { "value": 120, "label": "2 minutes (recommended)" },
        { "value": 300, "label": "5 minutes" }
      ],
      "default": 120,
      "help": "How far back the ambient buffer remembers. Longer = more context but higher token cost per activation.",
      "depends_on": "ambient_context"
    },
    "camera_context": {
      "type": "toggle",
      "label": "Include Camera Context",
      "default": false,
      "help": "When on, tapping also captures what you see. The AI can reference it."
    },
    "app_governance": {
      "type": "governance_builder",
      "label": "App Rules",
      "help": "Control what this app can and can't do."
    }
  },
  "disclosures": {
    "ambient_bystander_disclosure": {
      "title": "Ambient Context & Bystander Privacy",
      "body": "When ambient context is on, this app passively transcribes speech near you — including people who haven't consented to being heard by AI. The transcript stays in your device's RAM only. It is never saved to disk or sent anywhere until you explicitly activate. When you do activate, the buffered transcript is sent to your AI provider (using your API key) as part of that single request. Use responsibly.",
      "required_before": "ambient_context",
      "acknowledgment_stored": "local_only"
    }
  },
  "server": {
    "entrypoint": "src/server.ts",
    "port": 3000,
    "health_check": "/health"
  }
}
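
For reference, the ambient-context behavior the manifest commits to (a RAM-only buffer of recent nearby speech, drained into a single provider request when the user activates) could be implemented along these lines. This is a minimal sketch under assumptions: the `AmbientBuffer` and `TranscriptChunk` names are hypothetical and are not taken from the app's actual `src/server.ts`, which is not shown here.

```typescript
// Hypothetical sketch only: class and method names are illustrative,
// not the Lenses app's real API.

interface TranscriptChunk {
  text: string;
  timestamp: number; // milliseconds since epoch
}

class AmbientBuffer {
  private chunks: TranscriptChunk[] = [];

  // durationMs would come from the ambient_buffer_duration setting (default 120 s).
  constructor(private durationMs: number) {}

  // Called for each transcription segment while ambient_context is enabled.
  // The buffer lives only in process memory; nothing is written to disk.
  push(text: string): void {
    const now = Date.now();
    this.chunks.push({ text, timestamp: now });
    // Drop anything older than the configured window.
    this.chunks = this.chunks.filter((c) => now - c.timestamp <= this.durationMs);
  }

  // On activation (tap & hold, double tap, wake word), drain the buffer so the
  // transcript is included in exactly one request to the user's own provider.
  flush(): string {
    const context = this.chunks.map((c) => c.text).join(" ");
    this.chunks = [];
    return context;
  }
}

// Example: a 2-minute buffer, flushed when the user taps to activate.
const ambient = new AmbientBuffer(120_000);
ambient.push("nearby speech segment");
const contextForThisRequest = ambient.flush();
```

Whatever the real implementation looks like, the property the manifest declares ("data_retention": "none") is that the buffered transcript never leaves RAM and is only transmitted, with the user's own API key, at the moment of activation.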