Compare commits

3 Commits

7 changed files with 369 additions and 8 deletions

View File

@@ -112,8 +112,9 @@ EOF
}
env = {
TS_AUTHKEY = "${ts_authkey}"
TS_HOSTNAME = "git"
TS_HOSTNAME = "git"
TS_AUTHKEY = "${ts_oauthsecret}?ephemeral=true"
TS_EXTRA_ARGS = "--advertise-tags=tag:nomad"
}
resources {

View File

@@ -9,7 +9,7 @@ resource "cloudflare_dns_record" "git-othrayte-one" {
resource "nomad_job" "gitea" {
jobspec = templatefile("gitea.nomad.hcl", {
ts_authkey = data.sops_file.secrets.data["tailscale.auth_key"]
ts_oauthsecret = data.sops_file.secrets.data["tailscale.oauthsecret"]
})
}

View File

@@ -3,6 +3,10 @@
Mount the state on the fileshare to 2-nomad-config/.tfstate/
`sudo mount -t cifs //192.168.1.192/appdata/terraform /home/othrayte/Code/infra/2-nomad-config/.tfstate/ -o rw,username=othrayte,password=<pw>,uid=$(id -u),gid=$(id -g)`
# Tailscale OAuth Client
We use a Tailscale OAuth client secret to allow our containers to connect to Tailscale. We created an OAuth client called `nomad` with the `auth_keys` (write) scope for the tag `nomad`, and stored the secret in our secrets file.
# Secrets
The secrets file is encrypted using sops and will be automatically decrypted in the terraform provider.

View File

@@ -7,7 +7,7 @@
"direct_ip6": "ENC[AES256_GCM,data:E/V1pFjBp7c0PRhUa4cxqAVl8xZKsZzn,iv:Gw0qz2x1pMaieZaCcp4dD9sEVtQfcuEqRP3UpA2Bj/0=,tag:LpsPH3cJAlPCFX6EPabWnQ==,type:str]"
},
"tailscale": {
"auth_key": "ENC[AES256_GCM,data:gzh4nqEOQLijp5DTGHHSn0aO1mFQUB3sVSdAVDLG+a2H6XJ0BtJJGU55oLJURy7E/um7gzwDofP5mwZGTA==,iv:yl8lHqnNLB2AXlBfMyw/0CAR7+KmyKKDFc7kxbo9S6c=,tag:CunYd62x3omji6ozqmhgOg==,type:str]"
"oauthsecret": "ENC[AES256_GCM,data:c2GtA+FaDcAKqUtQquP35W650lo1soivNCJc7KzCoQws0hTkt3zICFomOArhIfpHQMnCG4SpNvnXalarKKKxVw==,iv:Pnf8+9wBGNooPl4sKX5aGXITQt7/qfpn+mWyKk8YLXo=,tag:mXL+bz0gESj18qjpdksldA==,type:str]"
},
"authelia": {
"session_secret": "ENC[AES256_GCM,data:eSpAwX/KPzed/Y0oi6QvBwB7Gv5Kiml4FJS5RyuJ7A0plAd8acNThNXi3H4=,iv:RmH0wB3smlSF+CYs4x1w2V9ixdxgdav4dAQntjO0S5g=,tag:Vo5eHiU+1/dep/IUryN/XQ==,type:str]",
@@ -30,8 +30,8 @@
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSByUWM4ZDVVbGFrUGdMRHBX\nUFBmU3Nlc0RBSzhFK0tHNHpkQXUvUVdiZUZJCmpRN1lFdENpWW0rcThjVlVQNUl6\nWnlLU0RnQ3FZby81Ly8xTFBrek9nMncKLS0tIFQ4UTRNOC9CRmx4OFJWem1wckZz\nUDFTSzdWZldFK3FqcTNWTWRyNDhHQ2MKS811mR5xn7qiC/aVgPFYJ5c6Q3zxRfcr\nHcvxUvB01vNJKZpRg92vvKPkV6lQO3DXCT98OdfwiymlEOvYxg71Pg==\n-----END AGE ENCRYPTED FILE-----\n"
}
],
"lastmodified": "2025-05-25T11:33:40Z",
"mac": "ENC[AES256_GCM,data:HvNkVDm3HOcYSAvDNFs0/w/QmiKFTTy0d+Onl/pFXEgdH/bBLqbeOwZV0tsaZYwNJluOH8EiU4gSBZ5EaCh4JrUTpHiiug4p5UXgRSva9sZ5D+9vzvfncqTdQVXKL6gdLMRVJQjz8lZVx0jV1czFES+4AECNgSq7lNRUHhau3eU=,iv:K33uicZwQyscLr1DUEAKLWPkFSH+aIntyceKB1KTu+M=,tag:mrTSWWlv5ZkN4K4HuIE/zw==,type:str]",
"lastmodified": "2025-09-06T12:15:59Z",
"mac": "ENC[AES256_GCM,data:kiyEudOTWXnF485QoODePBGNACuS6bY7KVZZe9oSPe2jnyyNn4oI3ukxsgZDEN48k4sESvSLN+yCCKx4I14oRYHMFRhLSN4YLivQOEp0XcR3w7wx3ONmNdiyMG+UgEquaCX4/lWDFUVfWkoWQeq8y+ap5LY1ocqZ9zJ+yCilCA4=,iv:qyQJi7Uf+JGDiPt0C6Ww4A7Fa6NGL0aD3B/CfB4pEG0=,tag:ci+amgE24/uiEPIT0aoc+A==,type:str]",
"encrypted_regex": "^(.*)$",
"version": "3.10.2"
}

View File

@@ -0,0 +1,345 @@
# Nomad job running the Teams status updater as a single Docker task.
# The task image is a bare python:3.11-slim; the startup script template
# below installs its dependencies at boot and execs the Python script.
job "teamsstatus" {
  group "app" {
    task "teamsstatus" {
      driver = "docker"
      config {
        image = "python:3.11-slim"
        # Entry point is the templated startup script rendered below.
        command = "/local/start.sh"
      }
      # Template for the startup script
      template {
        data = <<EOF
#!/bin/bash
set -e
cd /local
echo "Starting Teams Status Updater service..."
echo "Installing required Python packages..."
pip install msal requests
echo "Running Teams Status Updater script..."
exec python teamsstatus_standalone.py 2>&1
EOF
        destination = "local/start.sh"
        perms = "755"
      }
      # Template for the token cache.
      # Renders the pre-seeded MSAL token cache from the Nomad variable
      # nomad/jobs/teamsstatus so the container can authenticate without
      # an interactive browser sign-in.
      template {
        data = "{{ with nomadVar \"nomad/jobs/teamsstatus\" }}{{ .token_cache_json }}{{ end }}"
        destination = "local/token_cache.json"
      }
# Template for the Python script
template {
data = <<EOF
import logging
import time
from datetime import datetime, timedelta, timezone
import random
import json
import msal
import requests
import os
import atexit
# Configure logging
logging.basicConfig(level=logging.INFO)
# Embedded journey data
JOURNEY_DATA = '''Day Start Time AEST End Time AEST Start Dist End Dist Start Location End Location
08/06/2025 08:00:00 19:10:00 0km 973km Melbourne Port Pirie SA
09/06/2025 07:30:00 19:35:00 973km 2119km Port Pirie SA Mundrabilla WA
10/06/2025 06:45:00 15:38:00 2119km 2916km Mundrabilla WA Kalgoorlie WA
11/06/2025 10:45:00 17:55:00 2916km 3512km Kalgoorlie WA Perth
13/06/2025 07:00:00 13:58:00 3512km 4083km Perth Kalbarri WA
15/06/2025 07:00:00 16:52:00 4083km 4862km Kalbarri WA Coral Bay WA
18/06/2025 06:00:00 16:52:00 4862km 5554km Coral Bay WA Karijini NP WA
21/06/2025 14:00:00 15:21:00 5554km 5686km Karijini NP WA Karijini NP WA
22/06/2025 06:00:00 16:23:00 5686km 6559km Karijini NP WA Broome WA
23/06/2025 06:00:00 19:10:00 6559km 7688km Broome WA Kununurra WA
27/06/2025 06:00:00 16:29:00 7688km 8593km Kununurra WA Derby WA
28/06/2025 07:00:00 16:06:00 8593km 9358km Derby WA Port Hedland WA
29/06/2025 07:00:00 16:31:00 9358km 10150km Port Hedland WA Exmouth WA
02/07/2025 07:00:00 15:13:00 10150km 10866km Exmouth WA Shark Bay WA
05/07/2025 07:00:00 17:12:00 10866km 11712km Shark Bay WA Fremantle WA
06/07/2025 07:00:00 15:27:00 11712km 12411km Fremantle WA Esperance WA
08/07/2025 06:00:00 18:09:00 12411km 13144km Esperance WA Madura WA
09/07/2025 06:45:00 16:39:00 13144km 13821km Madura WA Ceduna SA
11/07/2025 08:30:00 17:46:00 13821km 14599km Ceduna SA Adelaide
12/07/2025 08:30:00 18:52:00 14599km 15348km Adelaide Melbourne'''
# Global variables for journey segments
_segments = []
def setup_token_cache(cache_filename="token_cache.json"):
    """Set up and return a serializable MSAL token cache.

    Loads the cache from *cache_filename* if present, and registers an
    atexit hook that writes the cache back to disk — but only when MSAL
    reports the in-memory state actually changed.

    Args:
        cache_filename: Path of the JSON token-cache file.

    Returns:
        msal.SerializableTokenCache: the loaded (possibly empty) cache.
    """
    cache = msal.SerializableTokenCache()
    try:
        # Fix: the original used a bare open() whose handle was never
        # closed; use a context manager instead.
        with open(cache_filename, "r") as fh:
            cache.deserialize(fh.read())
    except FileNotFoundError:
        # Fix: the original crashed when the cache file was missing.
        # Start with an empty cache; the atexit hook below will create
        # the file once a token has been acquired.
        logging.debug("Token cache %s not found; starting empty", cache_filename)

    def _persist():
        # Only rewrite the file when MSAL flagged a state change.
        if cache.has_state_changed:
            with open(cache_filename, "w") as fh:
                fh.write(cache.serialize())

    atexit.register(_persist)
    return cache
def get_msal_app(client_id, authority="https://login.microsoftonline.com/organizations", cache_filename="token_cache.json"):
    """Build an MSAL PublicClientApplication backed by a file token cache.

    Args:
        client_id: Azure AD application (client) ID.
        authority: AAD authority URL to authenticate against.
        cache_filename: Path of the JSON token-cache file.

    Returns:
        msal.PublicClientApplication wired to the persistent cache.
    """
    token_cache = setup_token_cache(cache_filename)
    application = msal.PublicClientApplication(
        client_id,
        authority=authority,
        token_cache=token_cache,
    )
    return application
def acquire_token(app, scope):
    """Obtain an access token for *scope*, silently when possible.

    Tries the first cached account via acquire_token_silent; falls back
    to an interactive browser sign-in otherwise.

    Args:
        app: An MSAL PublicClientApplication.
        scope: List of Graph permission scopes to request.

    Returns:
        The bearer token string, or None when acquisition failed.
    """
    result = None
    accounts = app.get_accounts(username=None)
    if accounts:
        logging.debug("Account(s) exists in cache, probably with token too. Let's try.")
        logging.debug("Account(s) already signed in:")
        for account in accounts:
            logging.debug(account["username"])
        # Pick the first cached account without prompting the user.
        chosen = accounts[0]
        logging.debug(f"Automatically using first account: {chosen['username']}")
        result = app.acquire_token_silent(scope, account=chosen)
    if not result:
        logging.debug("No suitable token exists in cache. Let's get a new one from AAD.")
        print("A local browser window will be open for you to sign in. CTRL+C to cancel.")
        result = app.acquire_token_interactive(scope)
    if "access_token" in result:
        return result["access_token"]
    # Surface the failure details before giving up.
    logging.error(result.get("error"))
    logging.error(result.get("error_description"))
    logging.debug(f"Correlation ID: {result.get('correlation_id')}")
    return None
def set_teams_status_message(access_token, user_id, status_message, expiration_date_time="2025-06-01T12:00:00", time_zone="UTC"):
    """POST a presence status message to Microsoft Graph for *user_id*.

    Args:
        access_token: Bearer token with Presence.ReadWrite scope.
        user_id: Azure AD object ID of the user to update.
        status_message: Plain-text message body.
        expiration_date_time: ISO timestamp when the message expires.
        time_zone: Time zone name for the expiration timestamp.

    Returns:
        True on HTTP 200, False otherwise.
    """
    endpoint = f"https://graph.microsoft.com/v1.0/users/{user_id}/presence/microsoft.graph.setStatusMessage"
    headers = {
        'Authorization': f'Bearer {access_token}',
        'Content-Type': 'application/json',
    }
    body = {
        "statusMessage": {
            "message": {
                # <pinnednote> suffix — presumably keeps the note pinned
                # in Teams; confirm against the Graph presence docs.
                "content": status_message + "<pinnednote></pinnednote>",
                "contentType": "text",
            }
        },
        "expirationDateTime": {
            "dateTime": expiration_date_time,
            "timeZone": time_zone,
        },
    }
    logging.debug(f"Setting status message for user {user_id}")
    response = requests.post(endpoint, json=body, headers=headers)
    if response.status_code != 200:
        logging.error(f"Failed to set Teams status message: {response.status_code}")
        return False
    logging.info(f"Teams status message set to: {status_message}")
    return True
def _load_segments():
    """Parse JOURNEY_DATA into the module-level _segments list (idempotent).

    Each row becomes a dict with tz-aware start/end datetimes, integer
    start/end distances (km), and start/end location names.
    """
    global _segments
    if _segments:
        return  # already parsed on an earlier call
    # Fixed UTC+10 offset — no DST handling (AEST year-round).
    aest = timezone(timedelta(hours=10))
    rows = JOURNEY_DATA.split('\n')[1:]  # first row is the header
    for row in rows:
        day, t_start, t_end, d_start, d_end, loc_start, loc_end = row.strip().split('\t')
        # Combine the date with each time-of-day and pin to AEST.
        leg_begins = datetime.strptime(f"{day} {t_start}", "%d/%m/%Y %H:%M:%S").replace(tzinfo=aest)
        leg_ends = datetime.strptime(f"{day} {t_end}", "%d/%m/%Y %H:%M:%S").replace(tzinfo=aest)
        _segments.append({
            'start_time': leg_begins,
            'end_time': leg_ends,
            'start_dist': int(d_start.rstrip('km')),
            'end_dist': int(d_end.rstrip('km')),
            'start_location': loc_start,
            'end_location': loc_end,
        })
def get_trip_info(target_datetime):
    """Return (distance_km, start_location, end_location) at *target_datetime*.

    Distance is interpolated linearly within a driving segment; between
    segments (and after the journey ends) the last reached stop is
    reported with equal start/end locations.

    Args:
        target_datetime: Timezone-aware datetime to evaluate.

    Returns:
        Tuple of (int km travelled, start location, end location).

    Raises:
        ValueError: If target_datetime is naive, or no journey data loaded.
    """
    if target_datetime.tzinfo is None:
        raise ValueError("target_datetime must be timezone-aware")
    # Ensure data is loaded
    _load_segments()
    # Fix: the original evaluated `not _segments or ...` and then indexed
    # _segments[0] anyway, raising IndexError on an empty list. Guard
    # explicitly with a clear error instead.
    if not _segments:
        raise ValueError("no journey segments loaded")
    # Before journey starts
    if target_datetime < _segments[0]['start_time']:
        origin = _segments[0]['start_location']
        return (0, origin, origin)
    # During journey
    for i, segment in enumerate(_segments):
        # Target falls in the rest gap before this segment begins.
        if target_datetime < segment['start_time']:
            prev_segment = _segments[i - 1]
            return (prev_segment['end_dist'], prev_segment['end_location'], prev_segment['end_location'])
        # Target is within this driving segment: interpolate the distance.
        if segment['start_time'] <= target_datetime <= segment['end_time']:
            total_seconds = (segment['end_time'] - segment['start_time']).total_seconds()
            elapsed_seconds = (target_datetime - segment['start_time']).total_seconds()
            fraction = elapsed_seconds / total_seconds
            distance_delta = segment['end_dist'] - segment['start_dist']
            current_dist = segment['start_dist'] + int(distance_delta * fraction)
            return (current_dist, segment['start_location'], segment['end_location'])
        # Between this segment and the next. (This duplicates the first
        # check on the next iteration and yields the same answer; kept
        # for clarity.)
        if i < len(_segments) - 1:
            next_segment = _segments[i + 1]
            if segment['end_time'] < target_datetime < next_segment['start_time']:
                return (segment['end_dist'], segment['end_location'], segment['end_location'])
    # After journey ends
    return (_segments[-1]['end_dist'], _segments[-1]['end_location'], _segments[-1]['end_location'])
def build_message(distance, start_loc, end_loc):
    """Compose the Teams status text for the current trip position.

    Args:
        distance: Kilometres travelled so far (0 before departure).
        start_loc: Location at the start of the current/most recent leg.
        end_loc: Destination of the current leg (equals start_loc when
            stopped between legs).

    Returns:
        The full status message string.
    """
    message = "On leave"
    # Fix: these branches were `if` / `if` / `elif`, so on the return leg
    # (distance > 13144) the message contained BOTH "back from WA" and
    # "around WA". The trip phases are mutually exclusive — use elif.
    if distance > 13144:
        message += ", driving my EV back from WA"
    elif distance > 2118:
        message += ", driving my EV around WA"
    elif distance > 0:
        message += ", driving my EV to WA"
    if distance > 0:
        # ±5 km jitter — presumably so repeated updates don't look
        # machine-generated; confirm this is intentional.
        distance += random.randint(-5, 5)
        message += f", {distance}kms travelled so far"
    if start_loc != end_loc:
        message += f", next stop {end_loc}"
    else:
        message += f", near {start_loc}"
    message += ", returning July 21st. Contacts {CIM: Grant Gorfine, Inserts: Daniel Pate, DevOps: Rob Duncan, else: Andrian Zubovic}"
    return message
def main():
    """Run the status-update loop.

    Refreshes the Graph access token roughly hourly and, every 15
    minutes, recomputes the trip position for a simulated "now" and
    pushes a new Teams status message when the distance has changed.

    Returns:
        0 on clean exit (reached only after KeyboardInterrupt).
    """
    test_mode = False  # Set to True to log messages without calling Graph
    time_scale = 1  # 1/600 # Set to 1/60 to run at 1 second per minute, 1 for normal speed
    # Set start time to 7:30 AM AEST (UTC+10) on June 8th, 2025 — the trip
    # start; date_offset shifts the real clock onto the itinerary timeline.
    aest = timezone(timedelta(hours=10))
    start_time = datetime.now(aest)
    date_offset = datetime(2025, 6, 8, 7, 30, 0, tzinfo=aest) - start_time
    if test_mode:
        logging.info("Running in test mode - status messages will not actually be set")
    # Azure AD app registration used for the sign-in flow.
    app = get_msal_app(client_id = "e6cda941-949f-495e-88f5-10eb45ffa0e7")
    last_token_refresh = 0
    # Token refresh interval (60 minutes in seconds)
    TOKEN_REFRESH_INTERVAL = int(60 * 60)  # Scale the 1 hour refresh interval
    old_distance = -1  # sentinel: forces an update on the first pass
    while True:
        try:
            # Refresh the token when the interval has elapsed, or on the
            # very first iteration (last_token_refresh == 0).
            current_time = time.time()
            if current_time - last_token_refresh >= TOKEN_REFRESH_INTERVAL or last_token_refresh == 0:
                logging.info("Acquiring/refreshing access token...")
                access_token = acquire_token(app, scope = ["https://graph.microsoft.com/Presence.ReadWrite"])
                if not access_token:
                    logging.error("Failed to acquire token")
                    exit(1)
                last_token_refresh = current_time
                logging.info("Token successfully refreshed")
            # Set the status message
            now = datetime.now(aest)  # Get current time in AEST
            if time_scale != 1:
                # Adjust the current time based on the time scale
                now = start_time + (now - start_time) / time_scale
            now += date_offset  # Adjust to the target start time
            distance, start_loc, end_loc = get_trip_info(now)  # We only need distance for comparison
            if distance != old_distance:
                message = build_message(distance, start_loc, end_loc)
                timestamp = now.strftime("%Y-%m-%d %H:%M:%S %Z")
                if not test_mode:
                    logging.info(f"[{timestamp}] Message: {message}")
                    success = set_teams_status_message(
                        access_token = access_token,
                        user_id = "1b625872-d8a8-42f4-b237-dfa6d8062360",
                        status_message = message,
                    )
                else:
                    logging.info(f"[TEST MODE] [{timestamp}] Message: {message}")
                    success = True
            else:
                logging.debug("Status message has not changed, skipping update")
                success = True
            old_distance = distance
            if success:
                wait_time = 900 * time_scale  # Scale the 15 minute wait time
                logging.debug(f"Waiting {wait_time} seconds before updating status message again...")
                time.sleep(wait_time)
            else:
                # NOTE(review): the failure path resets the refresh marker
                # and re-loops immediately with no back-off — confirm this
                # is intended.
                last_token_refresh = 0  # Reset token refresh time on failure
        except KeyboardInterrupt:
            logging.info("Status update interrupted by user. Exiting...")
            break
        except Exception as e:
            # Broad catch keeps the long-running service alive on
            # transient errors (network, Graph throttling, etc.).
            logging.error(f"An error occurred: {e}")
            time.sleep(300)  # Wait 5 minutes before retrying
    return 0


if __name__ == "__main__":
    exit(main())
EOF
        destination = "local/teamsstatus_standalone.py"
      }
      resources {
        cpu = 500
        memory = 256
      }
    }
    # Give up after 3 failed restarts within 5 minutes instead of
    # looping forever on a persistent failure.
    restart {
      attempts = 3
      interval = "5m"
      delay = "15s"
      mode = "fail"
    }
  }
}

View File

@@ -0,0 +1,10 @@
# Deploy the Teams status updater job defined in teamsstatus.nomad.hcl.
resource "nomad_job" "teamsstatus" {
  jobspec = file("${path.module}/teamsstatus.nomad.hcl")
}

# Disabled — presumably a one-off used to seed the MSAL token cache into
# the Nomad variable read by the job's token-cache template; requires a
# local token_cache.json. Confirm before re-enabling or deleting.
# resource "nomad_variable" "teamsstatus" {
#   path = "nomad/jobs/teamsstatus"
#   items = {
#     token_cache_json = file("${path.module}/token_cache.json")
#   }
# }

View File

@@ -16,6 +16,7 @@ job "traefik" {
service {
name = "traefik"
port = "api"
tags = [
"traefik.enable=true",
@@ -28,7 +29,7 @@ job "traefik" {
check {
name = "alive"
type = "tcp"
port = "http"
port = "api"
interval = "10s"
timeout = "2s"
}